diff --git a/.dockerignore b/.dockerignore index 29c6c45bb06536..602b46750d3708 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,11 @@ **/node_modules/ -datahub-frontend/build/ -metadata-ingestion/venv/ +*/build/ +*/*/build/ +**/venv/ +**/.tox/ +**/.mypy_cache/ +**/.pytest_cache/ +**/__pycache__/ out **/*.class # Have to copy gradle/wrapper/gradle-wrapper.jar, can't exclude ALL jars diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml new file mode 100644 index 00000000000000..2f677a0e552c23 --- /dev/null +++ b/.github/actions/ci-optimization/action.yml @@ -0,0 +1,92 @@ +name: 'Identify CI Optimizations' +description: 'Determine if code changes are specific to certain modules.' + +outputs: + frontend-only: + description: "Frontend only change" + value: ${{ steps.filter.outputs.frontend == 'true' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'false' }} + ingestion-only: + description: "Ingestion only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'true' && steps.filter.outputs.backend == 'false' }} + backend-only: + description: "Backend only change" + value: ${{ steps.filter.outputs.frontend == 'false' && steps.filter.outputs.ingestion == 'false' && steps.filter.outputs.backend == 'true' }} + backend-change: + description: "Backend code has changed" + value: ${{ steps.filter.outputs.backend == 'true' }} + ingestion-change: + description: "Ingestion code has changed" + value: ${{ steps.filter.outputs.ingestion == 'true' }} + ingestion-base-change: + description: "Ingestion base image docker image has changed" + value: ${{ steps.filter.outputs.ingestion-base == 'true' }} + frontend-change: + description: "Frontend code has changed" + value: ${{ steps.filter.outputs.frontend == 'true' }} + docker-change: + description: "Docker code has changed" + value: ${{ steps.filter.outputs.docker == 'true' }} + kafka-setup-change: + 
description: "Kafka setup docker change" + value: ${{ steps.filter.outputs.kafka-setup == 'true' }} + mysql-setup-change: + description: "Mysql setup docker change" + value: ${{ steps.filter.outputs.mysql-setup == 'true' }} + postgres-setup-change: + description: "Postgres setup docker change" + value: ${{ steps.filter.outputs.postgres-setup == 'true' }} + elasticsearch-setup-change: + description: "Elasticsearch setup docker change" + value: ${{ steps.filter.outputs.elasticsearch-setup == 'true' }} + smoke-test-change: + description: "Smoke test change" + value: ${{ steps.filter.outputs.smoke-test == 'true' }} +runs: + using: "composite" + steps: + - uses: dorny/paths-filter@v2 + id: filter + with: + filters: | + frontend: + - "datahub-frontend/**" + - "datahub-web-react/**" + - "smoke-test/tests/cypress/**" + - "docker/datahub-frontend/**" + ingestion: + - "metadata-ingestion-modules/**" + - "metadata-ingestion/**" + - "metadata-models/**" + - "smoke-test/**" + - "docker/datahub-ingestion**" + ingestion-base: + - "docker/datahub-ingestion-base/**" + docker: + - "docker/**" + backend: + - ".github/**" + - "metadata-models/**" + - "datahub-upgrade/**" + - "entity-registry/**" + - "li-utils/**" + - "metadata-auth/**" + - "metadata-dao-impl/**" + - "metadata-events/**" + - "metadata-io/**" + - "metadata-jobs/**" + - "metadata-service/**" + - "metadata-utils/**" + - "metadata-operation-context/**" + - "datahub-graphql-core/**" + - "smoke-test/**" + - "docker/**" + kafka-setup: + - "docker/kafka-setup/**" + mysql-setup: + - "docker/mysql-setup/**" + postgres-setup: + - "docker/postgres-setup/**" + elasticsearch-setup: + - "docker/elasticsearch-setup/**" + smoke-test: + - "smoke-test/**" diff --git a/.github/actions/docker-custom-build-and-push/action.yml b/.github/actions/docker-custom-build-and-push/action.yml index bd6bb842b1fb80..1c4a777c14802a 100644 --- a/.github/actions/docker-custom-build-and-push/action.yml +++ 
b/.github/actions/docker-custom-build-and-push/action.yml @@ -20,7 +20,7 @@ inputs: required: false images: - # e.g. linkedin/datahub-gms + # e.g. acryldata/datahub-gms description: "List of Docker images to use as base name for tags" required: true build-args: @@ -55,7 +55,7 @@ runs: # Code for testing the build when not pushing to Docker Hub. - name: Build and Load image for testing (if not publishing) - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 if: ${{ inputs.publish != 'true' }} with: context: ${{ inputs.context }} @@ -70,27 +70,36 @@ runs: push: false cache-from: type=registry,ref=${{ steps.docker_meta.outputs.tags }} cache-to: type=inline + - name: Single Tag + if: ${{ inputs.publish != 'true' }} + shell: bash + run: | + TAGS=""" + ${{ steps.docker_meta.outputs.tags }} + """ + echo "SINGLE_TAG=$(echo $TAGS | tr '\n' ' ' | awk -F' ' '{ print $1 }')" >> $GITHUB_OUTPUT + id: single_tag - name: Upload image locally for testing (if not publishing) uses: ishworkh/docker-image-artifact-upload@v1 if: ${{ inputs.publish != 'true' }} with: - image: ${{ steps.docker_meta.outputs.tags }} + image: ${{ steps.single_tag.outputs.SINGLE_TAG }} # Code for building multi-platform images and pushing to Docker Hub. 
- name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 if: ${{ inputs.publish == 'true' }} - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 if: ${{ inputs.publish == 'true' }} - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 if: ${{ inputs.publish == 'true' }} with: username: ${{ inputs.username }} password: ${{ inputs.password }} - name: Build and Push Multi-Platform image - uses: docker/build-push-action@v3 + uses: docker/build-push-action@v5 if: ${{ inputs.publish == 'true' }} with: context: ${{ inputs.context }} diff --git a/.github/scripts/check_policies.py b/.github/scripts/check_policies.py index 2ad5f7fff015b2..cc3576e05413c4 100644 --- a/.github/scripts/check_policies.py +++ b/.github/scripts/check_policies.py @@ -20,7 +20,7 @@ elif urn == "urn:li:dataHubPolicy:editor-platform-policy": editor_platform_policy_privileges = policy["info"]["privileges"] elif urn == "urn:li:dataHubPolicy:7": - all_user_platform_policy_privilges = policy["info"]["privileges"] + all_user_platform_policy_privileges = policy["info"]["privileges"] try: doc_type = policy["info"]["type"] privileges = policy["info"]["privileges"] @@ -54,10 +54,22 @@ ) assert len(diff_policies) == 0, f"Missing privileges for root user are {diff_policies}" -diff_policies = set(editor_platform_policy_privileges).difference( - set(all_user_platform_policy_privilges) -) -assert "MANAGE_POLICIES" not in all_user_platform_policy_privilges -assert ( - len(diff_policies) == 0 -), f"Missing privileges for all user policies are {diff_policies}" +# All users privileges checks +assert "MANAGE_POLICIES" not in all_user_platform_policy_privileges +assert "MANAGE_USERS_AND_GROUPS" not in all_user_platform_policy_privileges +assert "MANAGE_SECRETS" not in all_user_platform_policy_privileges +assert "MANAGE_USER_CREDENTIALS" not in all_user_platform_policy_privileges +assert 
"MANAGE_ACCESS_TOKENS" not in all_user_platform_policy_privileges +assert "EDIT_ENTITY" not in all_user_platform_policy_privileges +assert "DELETE_ENTITY" not in all_user_platform_policy_privileges + +# Editor checks +assert "MANAGE_POLICIES" not in editor_platform_policy_privileges +assert "MANAGE_USERS_AND_GROUPS" not in editor_platform_policy_privileges +assert "MANAGE_SECRETS" not in editor_platform_policy_privileges +assert "MANAGE_USER_CREDENTIALS" not in editor_platform_policy_privileges +assert "MANAGE_ACCESS_TOKENS" not in editor_platform_policy_privileges +# These don't prevent a user from modifying entities they are an asset owner of, i.e. their own profile info +assert "EDIT_CONTACT_INFO" not in editor_platform_policy_privileges +assert "EDIT_USER_PROFILE" not in editor_platform_policy_privileges +assert "EDIT_ENTITY_OWNERS" not in editor_platform_policy_privileges diff --git a/.github/scripts/check_python_package.py b/.github/scripts/check_python_package.py new file mode 100644 index 00000000000000..1b23d8e621ef06 --- /dev/null +++ b/.github/scripts/check_python_package.py @@ -0,0 +1,33 @@ +import setuptools +import os + +folders = ["./smoke-test/tests"] + +for folder in folders: + print(f"Checking folder {folder}") + packages = [i for i in setuptools.find_packages(folder) if "cypress" not in i] + namespace_packages = [ + i for i in setuptools.find_namespace_packages(folder) if "cypress" not in i + ] + + print("Packages found:", packages) + print("Namespace packages found:", namespace_packages) + + in_packages_not_namespace = set(packages) - set(namespace_packages) + in_namespace_not_packages = set(namespace_packages) - set(packages) + + if in_packages_not_namespace: + print(f"Packages not in namespace packages: {in_packages_not_namespace}") + if in_namespace_not_packages: + print(f"Namespace packages not in packages: {in_namespace_not_packages}") + for pkg in in_namespace_not_packages: + pkg_path = os.path.join(folder, pkg.replace(".", os.path.sep)) + 
print(f"Contents of {pkg_path}:") + print(os.listdir(pkg_path)) + + assert ( + len(in_packages_not_namespace) == 0 + ), f"Found packages in {folder} that are not in namespace packages: {in_packages_not_namespace}" + assert ( + len(in_namespace_not_packages) == 0 + ), f"Found namespace packages in {folder} that are not in packages: {in_namespace_not_packages}" diff --git a/.github/scripts/docker_helpers.sh b/.github/scripts/docker_helpers.sh index f238c5c4091841..e031a6d2a4d843 100755 --- a/.github/scripts/docker_helpers.sh +++ b/.github/scripts/docker_helpers.sh @@ -12,19 +12,19 @@ export SHORT_SHA=$(get_short_sha) echo "SHORT_SHA: $SHORT_SHA" function get_tag { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}\,${SHORT_SHA},g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG},g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1,g'),${SHORT_SHA} } function get_tag_slim { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim\,${SHORT_SHA}-slim,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-slim,g" -e 's,refs/tags/\(.*\),\1-slim,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g'),${SHORT_SHA}-slim } function get_tag_full { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full\,${SHORT_SHA}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${MAIN_BRANCH_TAG}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g'),${SHORT_SHA}-full } function get_python_docker_release_v { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},0.0.0+docker.${SHORT_SHA},g" -e 's,refs/tags/v\(.*\),\1+docker,g' -e 's,refs/pull/\([0-9]*\).*,0.0.0+docker.pr\1,g') + 
echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},1!0.0.0+docker.${SHORT_SHA},g" -e 's,refs/tags/v\(.*\),1!\1+docker,g' -e 's,refs/pull/\([0-9]*\).*,1!0.0.0+docker.pr\1,g') } function get_unique_tag { @@ -32,9 +32,9 @@ function get_unique_tag { } function get_unique_tag_slim { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-slim,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g') + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-slim,g" -e 's,refs/tags/\(.*\),\1-slim,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-slim,g') } function get_unique_tag_full { - echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-full,g" -e 's,refs/tags/,,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') -} \ No newline at end of file + echo $(echo ${GITHUB_REF} | sed -e "s,refs/heads/${MAIN_BRANCH},${SHORT_SHA}-full,g" -e 's,refs/tags/\(.*\),\1-full,g' -e 's,refs/pull/\([0-9]*\).*,pr\1-full,g') +} diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index 54042d104d9066..d4f0a1369da253 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -12,7 +12,7 @@ on: branches: - "**" paths: - - ".github/**" + - ".github/workflows/airflow-plugin.yml" - "metadata-ingestion-modules/airflow-plugin/**" - "metadata-ingestion/**" - "metadata-models/**" @@ -32,6 +32,7 @@ jobs: strategy: matrix: include: + # Note: this should be kept in sync with tox.ini. 
- python-version: "3.8" extra_pip_requirements: "apache-airflow~=2.1.4" extra_pip_extras: plugin-v1 @@ -39,17 +40,29 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.4.0" + extra_pip_requirements: "apache-airflow~=2.4.3" + extra_pip_extras: plugin-v2,test-airflow24 + - python-version: "3.10" + extra_pip_requirements: 'apache-airflow~=2.6.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.6.3/constraints-3.10.txt' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.6.0" + extra_pip_requirements: 'apache-airflow~=2.7.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.7.3/constraints-3.10.txt' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0" + extra_pip_requirements: 'apache-airflow~=2.8.1 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.8.1/constraints-3.10.txt' + extra_pip_extras: plugin-v2 + - python-version: "3.11" + extra_pip_requirements: 'apache-airflow~=2.9.3 -c https://raw.githubusercontent.com/apache/airflow/constraints-2.9.3/constraints-3.11.txt' extra_pip_extras: plugin-v2 fail-fast: false steps: - - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -57,7 +70,7 @@ jobs: - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Install airflow package and test (extras ${{ matrix.extra_pip_requirements }}) - run: ./gradlew -Pextra_pip_requirements='${{ matrix.extra_pip_requirements }}' -Pextra_pip_extras='${{ matrix.extra_pip_extras }}' :metadata-ingestion-modules:airflow-plugin:lint 
:metadata-ingestion-modules:airflow-plugin:testQuick + run: ./gradlew -Pextra_pip_requirements='${{ matrix.extra_pip_requirements }}' -Pextra_pip_extras='${{ matrix.extra_pip_extras }}' :metadata-ingestion-modules:airflow-plugin:build - name: pip freeze show list installed if: always() run: source metadata-ingestion-modules/airflow-plugin/venv/bin/activate && pip freeze @@ -69,6 +82,7 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Upload coverage to Codecov if: always() uses: codecov/codecov-action@v3 @@ -76,8 +90,8 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} directory: . fail_ci_if_error: false - flags: airflow-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} - name: pytest-airflow + flags: airflow,airflow-${{ matrix.extra_pip_extras }} + name: pytest-airflow-${{ matrix.python-version }}-${{ matrix.extra_pip_requirements }} verbose: true event-file: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 25f3957e8f0861..c93267947b65a8 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -20,6 +20,25 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + 
elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 + - uses: ./.github/actions/ci-optimization + id: ci-optimize + build: strategy: fail-fast: false @@ -27,38 +46,59 @@ jobs: command: [ # metadata-ingestion and airflow-plugin each have dedicated build jobs "except_metadata_ingestion", - "frontend" + "frontend", ] - timezone: ["UTC", "America/New_York"] + timezone: ["UTC"] + include: + # We only need the timezone variation for frontend tests. + - command: "frontend" + timezone: "America/New_York" runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - - uses: szenius/set-timezone@v1.0 + - uses: szenius/set-timezone@v1.2 with: timezoneLinux: ${{ matrix.timezone }} - - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - name: Set up JDK 11 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 + if: ${{ needs.setup.outputs.ingestion_change == 'true' }} with: python-version: "3.10" cache: pip - - name: Gradle build (and test) for metadata ingestion - # we only need the timezone runs for frontend tests - if: ${{ matrix.command == 'except_metadata_ingestion' && matrix.timezone == 'America/New_York' }} + - name: Gradle build (and test) for NOT metadata ingestion + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} run: | - ./gradlew build -x :metadata-ingestion:build -x :metadata-ingestion:check -x docs-website:build -x :metadata-integration:java:spark-lineage:test -x :metadata-io:test -x :metadata-ingestion-modules:airflow-plugin:build -x :metadata-ingestion-modules:airflow-plugin:check -x :datahub-frontend:build -x :datahub-web-react:build --parallel + ./gradlew build \ + -x 
:metadata-ingestion:build \ + -x :metadata-ingestion:check \ + -x :docs-website:build \ + -x :metadata-integration:java:spark-lineage:test \ + -x :metadata-io:test \ + -x :metadata-ingestion-modules:airflow-plugin:build \ + -x :metadata-ingestion-modules:airflow-plugin:check \ + -x :metadata-ingestion-modules:dagster-plugin:build \ + -x :metadata-ingestion-modules:dagster-plugin:check \ + -x :datahub-frontend:build \ + -x :datahub-web-react:build \ + --parallel - name: Gradle build (and test) for frontend - if: ${{ matrix.command == 'frontend' }} + if: ${{ matrix.command == 'frontend' && needs.setup.outputs.frontend_change == 'true' }} run: | ./gradlew :datahub-frontend:build :datahub-web-react:build --parallel env: NODE_OPTIONS: "--max-old-space-size=3072" + - name: Gradle compile (jdk8) for legacy Spark + if: ${{ matrix.command == 'except_metadata_ingestion' && needs.setup.outputs.backend_change == 'true' }} + run: | + ./gradlew -PjavaClassVersionDefault=8 :metadata-integration:java:spark-lineage:compileJava - uses: actions/upload-artifact@v3 if: always() with: @@ -67,27 +107,20 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Ensure codegen is updated uses: ./.github/actions/ensure-codegen-updated - - name: Slack failure notification - if: failure() && github.event_name == 'push' - uses: kpritam/slack-job-status-action@v1 - with: - job-status: ${{ job.status }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel: github-activities quickstart-compose-validation: runs-on: ubuntu-latest + needs: setup + if: ${{ needs.setup.outputs.docker_change == 'true' }} steps: - - uses: actions/checkout@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Download YQ - uses: chrisdickinson/setup-yq@v1.0.1 - with: - yq-version: v4.28.2 - name: Quickstart Compose Validation run: ./docker/quickstart/generate_and_compare.sh 
diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 9a17a70e7f8d41..b45d130e0494dd 100644 --- a/.github/workflows/check-datahub-jars.yml +++ b/.github/workflows/check-datahub-jars.yml @@ -4,17 +4,13 @@ on: push: branches: - master - paths-ignore: - - "docker/**" - - "docs/**" - - "**.md" + paths: + - "metadata-integration" pull_request: branches: - "**" - paths-ignore: - - "docker/**" - - "docs/**" - - "**.md" + paths: + - "metadata-integration" release: types: [published] @@ -31,14 +27,13 @@ jobs: command: ["datahub-client", "datahub-protobuf", "spark-lineage"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/code-checks.yml b/.github/workflows/code-checks.yml index e12971b8a62084..b3b94cc40a2fdd 100644 --- a/.github/workflows/code-checks.yml +++ b/.github/workflows/code-checks.yml @@ -31,9 +31,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/dagster-plugin.yml b/.github/workflows/dagster-plugin.yml new file mode 100644 index 00000000000000..48f1b24196c9e0 --- /dev/null +++ b/.github/workflows/dagster-plugin.yml @@ -0,0 +1,85 @@ +name: Dagster Plugin +on: + push: + branches: + - master + paths: + - ".github/workflows/dagster-plugin.yml" + - "metadata-ingestion-modules/dagster-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + pull_request: + branches: + - master + paths: + - ".github/**" + - "metadata-ingestion-modules/dagster-plugin/**" + - 
"metadata-ingestion/**" + - "metadata-models/**" + release: + types: [published] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + dagster-plugin: + runs-on: ubuntu-latest + env: + SPARK_VERSION: 3.0.3 + DATAHUB_TELEMETRY_ENABLED: false + strategy: + matrix: + python-version: ["3.8", "3.10"] + include: + - python-version: "3.8" + extraPythonRequirement: "dagster>=1.3.3" + - python-version: "3.10" + extraPythonRequirement: "dagster>=1.3.3" + fail-fast: false + steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh + - name: Install dagster package and test (extras ${{ matrix.extraPythonRequirement }}) + run: ./gradlew -Pextra_pip_requirements='${{ matrix.extraPythonRequirement }}' :metadata-ingestion-modules:dagster-plugin:lint :metadata-ingestion-modules:dagster-plugin:testQuick + - name: pip freeze show list installed + if: always() + run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && pip freeze + - uses: actions/upload-artifact@v3 + if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }} + with: + name: Test Results (dagster Plugin ${{ matrix.python-version}}) + path: | + **/build/reports/tests/test/** + **/build/test-results/test/** + **/junit.*.xml + - name: Upload coverage to Codecov + if: always() + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + directory: . 
+ fail_ci_if_error: false + flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} + name: pytest-dagster + verbose: true + + event-file: + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/docker-ingestion-smoke.yml b/.github/workflows/docker-ingestion-smoke.yml index 8d52c237928577..e3c37b45434ab1 100644 --- a/.github/workflows/docker-ingestion-smoke.yml +++ b/.github/workflows/docker-ingestion-smoke.yml @@ -25,7 +25,7 @@ jobs: python_release_version: ${{ steps.python_release_version.outputs.release_version }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -47,11 +47,10 @@ jobs: name: Build and Push Docker Image to Docker Hub runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: diff --git a/.github/workflows/docker-postgres-setup.yml b/.github/workflows/docker-postgres-setup.yml index fda4349f90bf7c..956f3f7b1c3903 100644 --- a/.github/workflows/docker-postgres-setup.yml +++ b/.github/workflows/docker-postgres-setup.yml @@ -27,7 +27,7 @@ jobs: publish: ${{ steps.publish.outputs.publish }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -46,9 +46,7 @@ jobs: needs: setup steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 8666a5e2e2171b..9487e71e8da3d1 
100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -3,15 +3,14 @@ on: push: branches: - master - paths-ignore: - - "docs/**" - - "**.md" pull_request: branches: - "**" - paths-ignore: - - "docs/**" - - "**.md" + types: + - labeled + - opened + - synchronize + - reopened release: types: [published] @@ -23,12 +22,12 @@ concurrency: cancel-in-progress: true env: - DATAHUB_GMS_IMAGE: "linkedin/datahub-gms" - DATAHUB_FRONTEND_IMAGE: "linkedin/datahub-frontend-react" - DATAHUB_MAE_CONSUMER_IMAGE: "linkedin/datahub-mae-consumer" - DATAHUB_MCE_CONSUMER_IMAGE: "linkedin/datahub-mce-consumer" - DATAHUB_KAFKA_SETUP_IMAGE: "linkedin/datahub-kafka-setup" - DATAHUB_ELASTIC_SETUP_IMAGE: "linkedin/datahub-elasticsearch-setup" + DATAHUB_GMS_IMAGE: "acryldata/datahub-gms" + DATAHUB_FRONTEND_IMAGE: "acryldata/datahub-frontend-react" + DATAHUB_MAE_CONSUMER_IMAGE: "acryldata/datahub-mae-consumer" + DATAHUB_MCE_CONSUMER_IMAGE: "acryldata/datahub-mce-consumer" + DATAHUB_KAFKA_SETUP_IMAGE: "acryldata/datahub-kafka-setup" + DATAHUB_ELASTIC_SETUP_IMAGE: "acryldata/datahub-elasticsearch-setup" DATAHUB_MYSQL_SETUP_IMAGE: "acryldata/datahub-mysql-setup" DATAHUB_UPGRADE_IMAGE: "acryldata/datahub-upgrade" DATAHUB_INGESTION_BASE_IMAGE: "acryldata/datahub-ingestion-base" @@ -44,15 +43,32 @@ jobs: unique_tag: ${{ steps.tag.outputs.unique_tag }} unique_slim_tag: ${{ steps.tag.outputs.unique_slim_tag }} unique_full_tag: ${{ steps.tag.outputs.unique_full_tag }} + docker-login: ${{ steps.docker-login.outputs.docker-login }} publish: ${{ steps.publish.outputs.publish }} + pr-publish: ${{ steps.pr-publish.outputs.publish }} python_release_version: ${{ steps.tag.outputs.python_release_version }} + short_sha: ${{ steps.tag.outputs.short_sha }} + branch_name: ${{ steps.tag.outputs.branch_name }} + repository_name: ${{ steps.tag.outputs.repository_name }} + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ 
steps.ci-optimize.outputs.ingestion-change == 'true' }} + ingestion_base_change: ${{ steps.ci-optimize.outputs.ingestion-base-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + backend_only: ${{ steps.ci-optimize.outputs.backend-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} steps: - - name: Checkout - uses: actions/checkout@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | source .github/scripts/docker_helpers.sh + echo "short_sha=${SHORT_SHA}" >> $GITHUB_OUTPUT echo "tag=$(get_tag)" >> $GITHUB_OUTPUT echo "slim_tag=$(get_tag_slim)" >> $GITHUB_OUTPUT echo "full_tag=$(get_tag_full)" >> $GITHUB_OUTPUT @@ -60,23 +76,78 @@ jobs: echo "unique_slim_tag=$(get_unique_tag_slim)" >> $GITHUB_OUTPUT echo "unique_full_tag=$(get_unique_tag_full)" >> $GITHUB_OUTPUT echo "python_release_version=$(get_python_docker_release_v)" >> $GITHUB_OUTPUT + echo "branch_name=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT + echo "repository_name=${GITHUB_REPOSITORY#*/}" >> $GITHUB_OUTPUT + - name: Check whether docker login is possible + id: docker-login + env: + ENABLE_DOCKER_LOGIN: ${{ secrets.ACRYL_DOCKER_PASSWORD != '' }} + run: | + echo "Enable Docker Login: ${{ env.ENABLE_DOCKER_LOGIN }}" + echo "docker-login=${{ env.ENABLE_DOCKER_LOGIN }}" >> $GITHUB_OUTPUT - name: Check whether publishing enabled id: publish env: - ENABLE_PUBLISH: ${{ secrets.DOCKER_PASSWORD != '' && 
secrets.ACRYL_DOCKER_PASSWORD != '' }} + ENABLE_PUBLISH: >- + ${{ + github.event_name != 'pull_request' + && ( secrets.ACRYL_DOCKER_PASSWORD != '' ) + }} run: | echo "Enable publish: ${{ env.ENABLE_PUBLISH }}" echo "publish=${{ env.ENABLE_PUBLISH }}" >> $GITHUB_OUTPUT + - name: Check whether PR publishing enabled + id: pr-publish + env: + ENABLE_PUBLISH: >- + ${{ + (github.event_name == 'pull_request' && (contains(github.event.pull_request.labels.*.name, 'publish') || contains(github.event.pull_request.labels.*.name, 'publish-docker'))) + && ( secrets.ACRYL_DOCKER_PASSWORD != '' ) + }} + run: | + echo "Enable PR publish: ${{ env.ENABLE_PUBLISH }}" + echo "publish=${{ env.ENABLE_PUBLISH }}" >> $GITHUB_OUTPUT + - uses: ./.github/actions/ci-optimization + id: ci-optimize + - uses: actions/setup-python@v4 + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + with: + python-version: "3.10" + cache: "pip" + - uses: actions/cache@v4 + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + with: + path: | + ~/.cache/uv + key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} + - name: Set up JDK 17 + uses: actions/setup-java@v3 + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Run lint on smoke test + if: ${{ steps.ci-optimize.outputs.smoke-test-change == 'true' }} + run: | + python ./.github/scripts/check_python_package.py + ./gradlew :smoke-test:pythonLint + ./gradlew :smoke-test:cypressLint gms_build: name: Build and Push DataHub GMS Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 with: - fetch-depth: 800 + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check 
out the repo + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :metadata-service:war:build -x test --parallel @@ -87,9 +158,9 @@ jobs: images: | ${{ env.DATAHUB_GMS_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-gms/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -101,12 +172,13 @@ jobs: name: "[Monitoring] Scan GMS images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, gms_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner @@ -130,11 +202,16 @@ jobs: name: Build and Push DataHub MAE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 with: - fetch-depth: 800 + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew 
:metadata-jobs:mae-consumer-job:build -x test --parallel @@ -145,9 +222,9 @@ jobs: images: | ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-mae-consumer/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -155,16 +232,17 @@ jobs: name: "[Monitoring] Scan MAE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mae_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner @@ -188,11 +266,16 @@ jobs: name: Build and Push DataHub MCE Consumer Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 with: - fetch-depth: 800 + distribution: "zulu" + 
java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :metadata-jobs:mce-consumer-job:build -x test --parallel @@ -203,9 +286,9 @@ jobs: images: | ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-mce-consumer/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -213,16 +296,17 @@ jobs: name: "[Monitoring] Scan MCE consumer images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, mce_consumer_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner @@ -246,11 +330,16 @@ jobs: name: Build and Push DataHub Upgrade Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.backend_change == 'true' || 
needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 with: - fetch-depth: 800 + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-upgrade:build -x test --parallel @@ -263,7 +352,7 @@ jobs: tags: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-upgrade/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -271,16 +360,17 @@ jobs: name: "[Monitoring] Scan DataHub Upgrade images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_upgrade_build] + if: ${{ needs.setup.outputs.backend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner @@ -304,11 +394,16 @@ jobs: name: Build and Push DataHub Frontend Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ 
needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 with: - fetch-depth: 800 + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Pre-build artifacts for docker image run: | ./gradlew :datahub-frontend:dist -x test -x yarnTest -x yarnLint --parallel @@ -321,9 +416,9 @@ jobs: images: | ${{ env.DATAHUB_FRONTEND_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-frontend/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -331,6 +426,7 @@ jobs: name: "[Monitoring] Scan Frontend images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, frontend_build] + if: ${{ needs.setup.outputs.frontend_change == 'true' || needs.setup.outputs.publish == 'true' }} permissions: contents: read # for actions/checkout to fetch code security-events: write # for github/codeql-action/upload-sarif to upload SARIF results @@ -340,7 +436,7 @@ jobs: uses: actions/checkout@v3 - name: Download image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' }} with: image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Run Trivy vulnerability scanner @@ -364,20 +460,19 @@ jobs: name: Build and Push DataHub Kafka Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.kafka_setup_change 
== 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: images: | ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/kafka-setup/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -386,11 +481,10 @@ jobs: name: Build and Push DataHub MySQL Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.mysql_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: @@ -399,7 +493,7 @@ jobs: tags: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . 
file: ./docker/mysql-setup/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -408,20 +502,19 @@ jobs: name: Build and Push DataHub Elasticsearch Setup Docker Image runs-on: ubuntu-latest needs: setup + if: ${{ needs.setup.outputs.elasticsearch_setup_change == 'true' || (needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true') }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 + uses: acryldata/sane-checkout-action@v3 - name: Build and push uses: ./.github/actions/docker-custom-build-and-push with: images: | ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }} tags: ${{ needs.setup.outputs.tag }} - username: ${{ secrets.DOCKER_USERNAME }} - password: ${{ secrets.DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/elasticsearch-setup/Dockerfile platforms: linux/amd64,linux/arm64/v8 @@ -432,19 +525,12 @@ jobs: outputs: tag: ${{ steps.tag.outputs.tag }} needs: setup + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: | - datahub-ingestion-base: - - 'docker/datahub-ingestion-base/**' + uses: acryldata/sane-checkout-action@v3 - name: Build and push Base Image - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.ingestion_base_change == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: base @@ -453,37 +539,36 @@ jobs: tags: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + publish: ${{ 
needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-ingestion-base/Dockerfile platforms: linux/amd64,linux/arm64/v8 - name: Compute DataHub Ingestion (Base) Tag id: tag - run: echo "tag=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }}" >> $GITHUB_OUTPUT datahub_ingestion_base_slim_build: name: Build and Push DataHub Ingestion (Base-Slim) Docker Image runs-on: ubuntu-latest outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: | - datahub-ingestion-base: - - 'docker/datahub-ingestion-base/**' + uses: acryldata/sane-checkout-action@v3 - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' && steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: - image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }} + image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} + - name: Login to DockerHub + uses: docker/login-action@v3 + if: ${{ needs.setup.outputs.docker-login == 'true' && needs.setup.outputs.publish == 'false' && needs.setup.outputs.pr-publish == 'false' && needs.setup.outputs.ingestion_base_change == 'false' }} + 
with: + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Build and push Base-Slim Image - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.ingestion_base_change == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: slim-install @@ -494,38 +579,37 @@ jobs: password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} build-args: | APP_ENV=slim - BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }} - publish: ${{ needs.setup.outputs.publish }} + BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-ingestion-base/Dockerfile platforms: linux/amd64,linux/arm64/v8 - name: Compute DataHub Ingestion (Base-Slim) Tag id: tag - run: echo "tag=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> $GITHUB_OUTPUT datahub_ingestion_base_full_build: name: Build and Push DataHub Ingestion (Base-Full) Docker Image runs-on: ubuntu-latest outputs: tag: ${{ steps.tag.outputs.tag }} needs: [setup, datahub_ingestion_base_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - uses: dorny/paths-filter@v2 - id: filter - with: - filters: | - datahub-ingestion-base: - - 'docker/datahub-ingestion-base/**' + uses: acryldata/sane-checkout-action@v3 - name: Download Base Image uses: 
ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' && steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: - image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }} + image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} + - name: Login to DockerHub + uses: docker/login-action@v3 + if: ${{ needs.setup.outputs.docker-login == 'true' && needs.setup.outputs.publish == 'false' && needs.setup.outputs.pr-publish == 'false' && needs.setup.outputs.ingestion_base_change == 'false' }} + with: + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Build and push (Base-Full) Image - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.ingestion_base_change == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: full-install @@ -536,45 +620,48 @@ jobs: password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} build-args: | APP_ENV=full - BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }} - publish: ${{ needs.setup.outputs.publish }} + BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . 
file: ./docker/datahub-ingestion-base/Dockerfile platforms: linux/amd64,linux/arm64/v8 - name: Compute DataHub Ingestion (Base-Full) Tag id: tag - run: echo "tag=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_full_tag || 'head' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_full_tag || 'head' }}" >> $GITHUB_OUTPUT datahub_ingestion_slim_build: name: Build and Push DataHub Ingestion Docker Images runs-on: ubuntu-latest outputs: tag: ${{ steps.tag.outputs.tag }} - needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} + needs_artifact_download: ${{ needs.setup.outputs.ingestion_change == 'true' && ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true') }} needs: [setup, datahub_ingestion_base_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - uses: dorny/paths-filter@v2 - id: filter + - name: Set up JDK 17 + uses: actions/setup-java@v3 with: - filters: | - datahub-ingestion-base: - - 'docker/datahub-ingestion-base/**' - datahub-ingestion: - - 'docker/datahub-ingestion/**' + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Build codegen - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ 
needs.setup.outputs.publish != 'true' && steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: - image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} + image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} + - name: Login to DockerHub + uses: docker/login-action@v3 + if: ${{ needs.setup.outputs.docker-login == 'true' && needs.setup.outputs.publish == 'false' && needs.setup.outputs.pr-publish == 'false' && needs.setup.outputs.ingestion_base_change == 'false' }} + with: + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Build and push Slim Image - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: final @@ -582,19 +669,19 @@ jobs: ${{ env.DATAHUB_INGESTION_IMAGE }} build-args: | BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }} - DOCKER_VERSION=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} + DOCKER_VERSION=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }} RELEASE_VERSION=${{ needs.setup.outputs.python_release_version }} APP_ENV=slim tags: ${{ needs.setup.outputs.slim_tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + publish: ${{ needs.setup.outputs.publish == 
'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-ingestion/Dockerfile platforms: linux/amd64,linux/arm64/v8 - name: Compute Tag id: tag - run: echo "tag=${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ needs.setup.outputs.ingestion_change == 'true' && needs.setup.outputs.unique_slim_tag || 'head-slim' }}" >> $GITHUB_OUTPUT datahub_ingestion_slim_scan: permissions: contents: read # for actions/checkout to fetch code @@ -603,9 +690,10 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion Slim images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_slim_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image Slim Image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }} @@ -633,31 +721,34 @@ jobs: runs-on: ubuntu-latest outputs: tag: ${{ steps.tag.outputs.tag }} - needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} + needs_artifact_download: ${{ needs.setup.outputs.ingestion_change == 'true' && ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) }} needs: [setup, datahub_ingestion_base_full_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - - name: Check out the repo - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - uses: dorny/paths-filter@v2 - id: filter + - name: Set up JDK 17 + uses: actions/setup-java@v3 
with: - filters: | - datahub-ingestion-base: - - 'docker/datahub-ingestion-base/**' - datahub-ingestion: - - 'docker/datahub-ingestion/**' + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 - name: Build codegen - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} run: ./gradlew :metadata-ingestion:codegen - name: Download Base Image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' && steps.filter.outputs.datahub-ingestion-base == 'true' }} + if: ${{ needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' && needs.setup.outputs.ingestion_base_change == 'true' }} with: - image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }} + image: ${{ env.DATAHUB_INGESTION_BASE_IMAGE }}:${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} + - name: Login to DockerHub + uses: docker/login-action@v3 + if: ${{ needs.setup.outputs.docker-login == 'true' && needs.setup.outputs.publish == 'false' && needs.setup.outputs.pr-publish == 'false' && needs.setup.outputs.ingestion_base_change == 'false' }} + with: + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Build and push Full Image - if: ${{ steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true' || needs.setup.outputs.publish }} + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} uses: ./.github/actions/docker-custom-build-and-push with: target: final @@ -665,18 +756,18 
@@ jobs: ${{ env.DATAHUB_INGESTION_IMAGE }} build-args: | BASE_IMAGE=${{ env.DATAHUB_INGESTION_BASE_IMAGE }} - DOCKER_VERSION=${{ steps.filter.outputs.datahub-ingestion-base == 'true' && needs.setup.outputs.unique_tag || 'head' }} + DOCKER_VERSION=${{ needs.setup.outputs.ingestion_base_change == 'true' && needs.setup.outputs.unique_tag || 'head' }} RELEASE_VERSION=${{ needs.setup.outputs.python_release_version }} tags: ${{ needs.setup.outputs.tag }} username: ${{ secrets.ACRYL_DOCKER_USERNAME }} password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - publish: ${{ needs.setup.outputs.publish }} + publish: ${{ needs.setup.outputs.publish == 'true' || needs.setup.outputs.pr-publish == 'true' }} context: . file: ./docker/datahub-ingestion/Dockerfile platforms: linux/amd64,linux/arm64/v8 - name: Compute Tag (Full) id: tag - run: echo "tag=${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.unique_tag || 'head' }}" >> $GITHUB_OUTPUT + run: echo "tag=${{ needs.setup.outputs.ingestion_change == 'true' && needs.setup.outputs.unique_tag || 'head' }}" >> $GITHUB_OUTPUT datahub_ingestion_full_scan: permissions: contents: read # for actions/checkout to fetch code @@ -685,9 +776,10 @@ jobs: name: "[Monitoring] Scan Datahub Ingestion images for vulnerabilities" runs-on: ubuntu-latest needs: [setup, datahub_ingestion_full_build] + if: ${{ needs.setup.outputs.ingestion_change == 'true' || needs.setup.outputs.publish == 'true' }} steps: - name: Checkout # adding checkout step just to make trivy upload happy - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Download image Full Image uses: ishworkh/docker-image-artifact-download@v1 if: ${{ needs.datahub_ingestion_full_build.outputs.needs_artifact_download == 'true' }} @@ -710,16 +802,31 @@ jobs: with: sarif_file: "trivy-results.sarif" + smoke_test_matrix: + runs-on: ubuntu-latest + needs: setup + outputs: + matrix: ${{ 
steps.set-matrix.outputs.matrix }} + steps: + - id: set-matrix + run: | + if [ '${{ needs.setup.outputs.frontend_only }}' == 'true' ]; then + echo 'matrix=["cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT + elif [ '${{ needs.setup.outputs.ingestion_only }}' == 'true' ]; then + echo 'matrix=["no_cypress_suite0","no_cypress_suite1"]' >> $GITHUB_OUTPUT + elif [ '${{ needs.setup.outputs.backend_change }}' == 'true' ]; then + echo 'matrix=["no_cypress_suite0","no_cypress_suite1","cypress_suite1","cypress_rest"]' >> $GITHUB_OUTPUT + else + echo 'matrix=[]' >> $GITHUB_OUTPUT + fi + smoke_test: name: Run Smoke Tests runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - test_strategy: ["no_cypress", "cypress_suite1", "cypress_rest"] needs: [ setup, + smoke_test_matrix, gms_build, frontend_build, kafka_setup_build, @@ -730,16 +837,27 @@ jobs: datahub_upgrade_build, datahub_ingestion_slim_build, ] + strategy: + fail-fast: false + matrix: + test_strategy: ${{ fromJson(needs.smoke_test_matrix.outputs.matrix) }} + if: ${{ always() && !failure() && !cancelled() && needs.smoke_test_matrix.outputs.matrix != '[]' }} steps: + - name: Free up disk space + run: | + sudo apt-get remove 'dotnet-*' azure-cli || true + sudo rm -rf /usr/local/lib/android/ || true + sudo docker image prune -a -f || true - name: Disk Check run: df -h . 
&& docker images - name: Check out the repo - uses: actions/checkout@v3 - - name: Set up JDK 11 + uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" @@ -749,6 +867,12 @@ jobs: - name: Build datahub cli run: | ./gradlew :metadata-ingestion:install + - name: Login to DockerHub + uses: docker/login-action@v3 + if: ${{ needs.setup.outputs.docker-login == 'true' }} + with: + username: ${{ secrets.ACRYL_DOCKER_USERNAME }} + password: ${{ secrets.ACRYL_DOCKER_PASSWORD }} - name: Disk Check run: df -h . && docker images - name: Remove images @@ -757,66 +881,105 @@ jobs: run: df -h . && docker images - name: Download GMS image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.gms_build.result == 'success' }} with: image: ${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Frontend image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.frontend_build.result == 'success' }} with: image: ${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Kafka Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.kafka_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Mysql Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ 
needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mysql_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download Elastic Setup image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.elasticsearch_setup_build.result == 'success' }} with: image: ${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MCE Consumer image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mce_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download MAE Consumer image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.mae_consumer_build.result == 'success' }} with: image: ${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download upgrade image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.setup.outputs.publish != 'true' }} + if: ${{ ( needs.setup.outputs.publish != 'true' && needs.setup.outputs.pr-publish != 'true' ) && needs.datahub_upgrade_build.result == 'success' }} with: image: ${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }} - name: Download datahub-ingestion-slim image uses: ishworkh/docker-image-artifact-download@v1 - if: ${{ needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' }} + if: ${{ 
needs.datahub_ingestion_slim_build.outputs.needs_artifact_download == 'true' && needs.datahub_ingestion_slim_build.result == 'success' }} with: image: ${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }} - name: Disk Check run: df -h . && docker images + - name: CI Optimization Head Images + # When publishing all tests/images are built (no optimizations) + if: ${{ needs.setup.outputs.publish != 'true' }} + run: | + if [ '${{ needs.setup.outputs.backend_change }}' == 'false' ]; then + echo 'GMS/Upgrade/MCE/MAE head images' + docker pull '${{ env.DATAHUB_GMS_IMAGE }}:head' + docker pull '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' + docker pull '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' + docker pull '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' + docker tag '${{ env.DATAHUB_GMS_IMAGE }}:head' '${{ env.DATAHUB_GMS_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MCE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:head' '${{ env.DATAHUB_MAE_CONSUMER_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + docker tag '${{ env.DATAHUB_UPGRADE_IMAGE }}:head' '${{ env.DATAHUB_UPGRADE_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.frontend_change }}' == 'false' ]; then + echo 'Frontend head images' + docker pull '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' + docker tag '${{ env.DATAHUB_FRONTEND_IMAGE }}:head' '${{ env.DATAHUB_FRONTEND_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.kafka_setup_change }}' == 'false' ]; then + echo 'kafka-setup head images' + docker pull '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:head' '${{ env.DATAHUB_KAFKA_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.mysql_setup_change }}' == 'false' ]; then + echo 'mysql-setup head 
images' + docker pull '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:head' '${{ env.DATAHUB_MYSQL_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.elasticsearch_setup_change }}' == 'false' ]; then + echo 'elasticsearch-setup head images' + docker pull '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' + docker tag '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:head' '${{ env.DATAHUB_ELASTIC_SETUP_IMAGE }}:${{ needs.setup.outputs.unique_tag }}' + fi + if [ '${{ needs.setup.outputs.ingestion_change }}' == 'false' ]; then + echo 'datahub-ingestion head-slim images' + docker pull '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' + if [ '${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }}' != 'head-slim' ]; then + docker tag '${{ env.DATAHUB_INGESTION_IMAGE }}:head-slim' '${{ env.DATAHUB_INGESTION_IMAGE }}:${{ needs.datahub_ingestion_slim_build.outputs.tag }}' + fi + fi + - name: Disk Check + run: df -h . && docker images - name: run quickstart env: DATAHUB_TELEMETRY_ENABLED: false DATAHUB_VERSION: ${{ needs.setup.outputs.unique_tag }} DATAHUB_ACTIONS_IMAGE: ${{ env.DATAHUB_INGESTION_IMAGE }} - ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag }} + ACTIONS_VERSION: ${{ needs.datahub_ingestion_slim_build.outputs.tag || 'head-slim' }} ACTIONS_EXTRA_PACKAGES: "acryl-datahub-actions[executor]==0.0.13 acryl-datahub-actions==0.0.13 acryl-datahub==0.10.5" ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml" run: | ./smoke-test/run-quickstart.sh - - name: sleep 60s - run: | - # we are doing this because gms takes time to get ready - # and we don't have a better readiness check when bootstrap is done - sleep 60s - name: Disk Check run: df -h . 
&& docker images - name: Disable ES Disk Threshold @@ -851,8 +1014,15 @@ jobs: if: failure() run: | docker ps -a - docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log - docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log + docker logs datahub-datahub-gms-1 >& gms-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-actions-1 >& actions-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-mae-consumer-1 >& mae-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-mce-consumer-1 >& mce-${{ matrix.test_strategy }}.log || true + docker logs datahub-broker-1 >& broker-${{ matrix.test_strategy }}.log || true + docker logs datahub-mysql-1 >& mysql-${{ matrix.test_strategy }}.log || true + docker logs datahub-elasticsearch-1 >& elasticsearch-${{ matrix.test_strategy }}.log || true + docker logs datahub-datahub-frontend-react-1 >& frontend-${{ matrix.test_strategy }}.log || true + docker logs datahub-upgrade-1 >& upgrade-${{ matrix.test_strategy }}.log || true - name: Upload logs uses: actions/upload-artifact@v3 if: failure() @@ -873,10 +1043,20 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml - - name: Slack failure notification - if: failure() && github.event_name == 'push' - uses: kpritam/slack-job-status-action@v1 + !**/binary/** + deploy_datahub_head: + name: Deploy to Datahub HEAD + runs-on: ubuntu-latest + needs: [setup, smoke_test] + steps: + - uses: aws-actions/configure-aws-credentials@v1 + if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} + with: + aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }} + aws-region: us-west-2 + - uses: isbang/sqs-action@v0.2.0 + if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && 
needs.setup.outputs.repository_name == 'datahub' }} with: - job-status: ${{ job.status }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel: github-activities + sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }} + message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}' diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index ebe2990f3a3cdd..9d63663693f902 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -4,9 +4,19 @@ on: pull_request: branches: - "**" + paths: + - "metadata-ingestion/**" + - "metadata-models/**" + - "docs/**" + - "docs-website/**" push: branches: - master + paths: + - "metadata-ingestion/**" + - "metadata-models/**" + - "docs/**" + - "docs-website/**" # release: # types: [published, edited] @@ -17,13 +27,20 @@ concurrency: jobs: gh-pages: runs-on: ubuntu-latest + permissions: + contents: write steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 + # We explicitly don't use acryldata/sane-checkout-action because docusaurus runs + # git commands to determine the last change date for each file. 
+ - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/lint-actions.yml b/.github/workflows/lint-actions.yml index 6f34bf292bf51a..4d83adbeba08a1 100644 --- a/.github/workflows/lint-actions.yml +++ b/.github/workflows/lint-actions.yml @@ -10,7 +10,7 @@ jobs: actionlint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: acryldata/sane-checkout-action@v3 - uses: reviewdog/action-actionlint@v1 with: reporter: github-pr-review diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index 699ca330ce0ac6..51b97552eb150a 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -6,13 +6,15 @@ on: paths: - ".github/workflows/metadata-ingestion.yml" - "metadata-ingestion/**" + - "metadata-ingestion-modules/**" - "metadata-models/**" pull_request: branches: - "**" paths: - - ".github/**" + - ".github/workflows/metadata-ingestion.yml" - "metadata-ingestion/**" + - "metadata-ingestion-modules/**" - "metadata-models/**" release: types: [published] @@ -31,7 +33,7 @@ jobs: # DATAHUB_LOOKML_GIT_TEST_SSH_KEY: ${{ secrets.DATAHUB_LOOKML_GIT_TEST_SSH_KEY }} strategy: matrix: - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.10"] command: [ "testQuick", @@ -40,11 +42,26 @@ jobs: "testIntegrationBatch2", ] include: - - python-version: "3.7" + - python-version: "3.8" - python-version: "3.10" fail-fast: false steps: - - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - uses: acryldata/sane-checkout-action@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + 
cache: "pip" + - uses: actions/cache@v4 + with: + path: | + ~/.cache/uv + key: ${{ runner.os }}-uv-${{ hashFiles('**/requirements.txt') }} - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} @@ -73,6 +90,7 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Upload coverage to Codecov if: ${{ always() && matrix.python-version == '3.10' }} uses: codecov/codecov-action@v3 diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 48f230ce14c8db..6797c7ad67c0b6 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -24,23 +24,42 @@ concurrency: cancel-in-progress: true jobs: + setup: + runs-on: ubuntu-latest + outputs: + frontend_change: ${{ steps.ci-optimize.outputs.frontend-change == 'true' }} + ingestion_change: ${{ steps.ci-optimize.outputs.ingestion-change == 'true' }} + backend_change: ${{ steps.ci-optimize.outputs.backend-change == 'true' }} + docker_change: ${{ steps.ci-optimize.outputs.docker-change == 'true' }} + frontend_only: ${{ steps.ci-optimize.outputs.frontend-only == 'true' }} + ingestion_only: ${{ steps.ci-optimize.outputs.ingestion-only == 'true' }} + kafka_setup_change: ${{ steps.ci-optimize.outputs.kafka-setup-change == 'true' }} + mysql_setup_change: ${{ steps.ci-optimize.outputs.mysql-setup-change == 'true' }} + postgres_setup_change: ${{ steps.ci-optimize.outputs.postgres-setup-change == 'true' }} + elasticsearch_setup_change: ${{ steps.ci-optimize.outputs.elasticsearch-setup-change == 'true' }} + steps: + - name: Check out the repo + uses: acryldata/sane-checkout-action@v3 + - uses: ./.github/actions/ci-optimization + id: ci-optimize build: runs-on: ubuntu-latest timeout-minutes: 60 + needs: setup steps: - - uses: actions/checkout@v3 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 
11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 + if: ${{ needs.setup.outputs.ingestion_change == 'true' }} with: python-version: "3.10" + cache: "pip" - name: Gradle build (and test) - # there is some race condition in gradle build, which makes gradle never terminate in ~30% of the runs - # running build first without datahub-web-react:yarnBuild and then with it is 100% stable - # datahub-frontend:unzipAssets depends on datahub-web-react:yarnBuild but gradle does not know about it run: | ./gradlew :metadata-io:test - uses: actions/upload-artifact@v3 @@ -51,6 +70,7 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml + !**/binary/** - name: Ensure codegen is updated uses: ./.github/actions/ensure-codegen-updated diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index 9d54c88eee591f..558b7c80f727c1 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -3,9 +3,8 @@ on: push: branches: - master - paths-ignore: - - "docs/**" - - "**.md" + paths: + - "metadata-models/**" release: types: [published] @@ -30,14 +29,34 @@ jobs: runs-on: ubuntu-latest needs: setup steps: - - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 + - uses: acryldata/sane-checkout-action@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh - name: Run model generation run: ./gradlew :metadata-models:build + - name: Generate metadata files + if: ${{ needs.setup.outputs.publish == 'true' }} + run: ./gradlew :metadata-ingestion:modelDocGen + - name: Configure AWS Credentials + if: ${{ needs.setup.outputs.publish == 'true' }} + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ 
secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.ACRYL_CI_ARTIFACTS_ACCESS_KEY }} + aws-region: us-west-2 + - name: Upload metadata to S3 + if: ${{ needs.setup.outputs.publish == 'true' }} + run: aws s3 cp ./metadata-ingestion/generated/docs/metadata_model_mces.json s3://${{ secrets.ACRYL_CI_ARTIFACTS_BUCKET }}/datahub/demo/metadata/ - name: Upload metadata to DataHub if: ${{ needs.setup.outputs.publish == 'true' }} env: diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml index 82bb90f68f4c32..52f83b3be5283d 100644 --- a/.github/workflows/pr-labeler.yml +++ b/.github/workflows/pr-labeler.yml @@ -14,3 +14,69 @@ jobs: with: repo-token: "${{ secrets.GITHUB_TOKEN }}" configuration-path: ".github/pr-labeler-config.yml" + - uses: actions-ecosystem/action-add-labels@v1.1.0 + # only add names of Acryl Data team members here + if: + ${{ + !contains( + fromJson('[ + "anshbansal", + "asikowitz", + "chriscollins3456", + "david-leifker", + "shirshanka", + "sid-acryl", + "swaroopjagadish", + "treff7es", + "yoonhyejin", + "eboneil", + "gabe-lyons", + "hsheth2", + "jjoyce0510", + "maggiehays", + "pedro93", + "RyanHolstien", + "Kunal-kankriya", + "purnimagarg1", + "dushayntAW", + "sagar-salvi-apptware", + "kushagra-apptware", + "Salman-Apptware", + "mayurinehate", + "noggi", + "skrydal" + ]'), + github.actor + ) + }} + with: + github_token: ${{ github.token }} + labels: | + community-contribution + - uses: actions-ecosystem/action-add-labels@v1.1.0 + # only add names of champions here. 
Confirm with DevRel Team + if: + ${{ + contains( + fromJson('[ + "siladitya2", + "sgomezvillamor", + "ngamanda", + "HarveyLeo", + "frsann", + "bossenti", + "nikolakasev", + "PatrickfBraz", + "cuong-pham", + "sudhakarast", + "tkdrahn", + "rtekal", + "sgm44" + ]'), + github.actor + ) + }} + with: + github_token: ${{ github.token }} + labels: | + datahub-community-champion diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index 7cd07b130dd800..7137302c73564c 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -36,7 +36,7 @@ jobs: tag: ${{ steps.tag.outputs.tag }} steps: - name: Checkout - uses: actions/checkout@v3 + uses: acryldata/sane-checkout-action@v3 - name: Compute Tag id: tag run: | @@ -48,21 +48,21 @@ jobs: needs: ["check-secret", "setup"] if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 800 - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: checkout upstream repo run: | git remote add upstream https://github.com/datahub-project/datahub.git - git fetch upstream --tags --force + git fetch upstream --tags --force --filter=tree:0 - name: publish datahub-client jar snapshot if: ${{ github.event_name != 'release' }} env: @@ -167,3 +167,29 @@ jobs: echo signingKey=$SIGNING_KEY >> gradle.properties ./gradlew -PreleaseVersion=${{ needs.setup.outputs.tag }} :metadata-auth:auth-api:publish ./gradlew :metadata-auth:auth-api:closeAndReleaseRepository --info + - name: publish datahub-custom-plugin-lib snapshot jar + if: ${{ github.event_name != 'release' }} + env: + RELEASE_USERNAME: ${{ secrets.RELEASE_USERNAME }} + RELEASE_PASSWORD: ${{ 
secrets.RELEASE_PASSWORD }} + SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }} + SIGNING_KEY: ${{ secrets.SIGNING_KEY }} + NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }} + NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }} + run: | + echo signingKey=$SIGNING_KEY >> gradle.properties + ./gradlew :metadata-integration:java:custom-plugin-lib:printVersion + ./gradlew :metadata-integration:java:custom-plugin-lib:publish + - name: release datahub-custom-plugin-lib jar + if: ${{ github.event_name == 'release' }} + env: + RELEASE_USERNAME: ${{ secrets.RELEASE_USERNAME }} + RELEASE_PASSWORD: ${{ secrets.RELEASE_PASSWORD }} + SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }} + SIGNING_KEY: ${{ secrets.SIGNING_KEY }} + NEXUS_USERNAME: ${{ secrets.NEXUS_USERNAME }} + NEXUS_PASSWORD: ${{ secrets.NEXUS_PASSWORD }} + run: | + echo signingKey=$SIGNING_KEY >> gradle.properties + ./gradlew -PreleaseVersion=${{ needs.setup.outputs.tag }} :metadata-integration:java:custom-plugin-lib:publish + ./gradlew :metadata-integration:java:custom-plugin-lib:closeAndReleaseRepository --info diff --git a/.github/workflows/qodana-scan.yml b/.github/workflows/qodana-scan.yml index 022ec9e6eda6a9..750cf24ad38e57 100644 --- a/.github/workflows/qodana-scan.yml +++ b/.github/workflows/qodana-scan.yml @@ -14,7 +14,7 @@ jobs: qodana: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: acryldata/sane-checkout-action@v3 - name: "Qodana Scan" uses: JetBrains/qodana-action@v2022.3.4 - uses: github/codeql-action/upload-sarif@v2 diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 541b2019b93ef1..8ffc8420ba9413 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -29,22 +29,25 @@ jobs: spark-smoke-test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 800 - fetch-tags: true - - name: Set up JDK 11 + - uses: acryldata/sane-checkout-action@v3 + - name: Set up JDK 17 uses: 
actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 + - uses: gradle/actions/setup-gradle@v3 - uses: actions/setup-python@v4 with: python-version: "3.10" + cache: "pip" - name: Install dependencies run: ./metadata-ingestion/scripts/install_deps.sh + - name: Disk Check + run: df -h . && docker images - name: Remove images run: docker image prune -a -f || true + - name: Disk Check + run: df -h . && docker images - name: Smoke test run: | ./gradlew :metadata-integration:java:spark-lineage:integrationTest \ @@ -52,10 +55,27 @@ jobs: -x :datahub-web-react:yarnLint \ -x :datahub-web-react:yarnGenerate \ -x :datahub-web-react:yarnInstall \ - -x :datahub-web-react:yarnQuickBuild \ - -x :datahub-web-react:copyAssets \ + -x :datahub-web-react:yarnBuild \ -x :datahub-web-react:distZip \ -x :datahub-web-react:jar + - name: store logs + if: failure() + run: | + docker ps -a + docker logs datahub-gms >& gms-${{ matrix.test_strategy }}.log || true + docker logs datahub-actions >& actions-${{ matrix.test_strategy }}.log || true + docker logs broker >& broker-${{ matrix.test_strategy }}.log || true + docker logs mysql >& mysql-${{ matrix.test_strategy }}.log || true + docker logs elasticsearch >& elasticsearch-${{ matrix.test_strategy }}.log || true + docker logs datahub-frontend-react >& frontend-${{ matrix.test_strategy }}.log || true + - name: Upload logs + uses: actions/upload-artifact@v3 + if: failure() + with: + name: docker logs + path: | + "**/build/container-logs/*.log" + "*.log" - uses: actions/upload-artifact@v3 if: always() with: @@ -64,10 +84,4 @@ jobs: **/build/reports/tests/test/** **/build/test-results/test/** **/junit.*.xml - - name: Slack failure notification - if: failure() && github.event_name == 'push' - uses: kpritam/slack-job-status-action@v1 - with: - job-status: ${{ job.status }} - slack-bot-token: ${{ secrets.SLACK_BOT_TOKEN }} - channel: github-activities + !**/binary/** diff --git a/.github/workflows/test-results.yml 
b/.github/workflows/test-results.yml index 0153060692271c..c94a5fc340f473 100644 --- a/.github/workflows/test-results.yml +++ b/.github/workflows/test-results.yml @@ -2,7 +2,7 @@ name: Test Results on: workflow_run: - workflows: ["build & test", "metadata ingestion", "Airflow Plugin"] + workflows: ["build & test", "metadata ingestion", "Airflow Plugin", "Dagster Plugin"] types: - completed diff --git a/.gitignore b/.gitignore index 49ab5c475096c9..43c627f9ed244f 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ venv.bak/ dmypy.json MANIFEST *.pyc +.python-version # Generated files **/bin @@ -125,3 +126,6 @@ metadata-service/war/bin/ metadata-utils/bin/ test-models/bin/ +datahub-executor/ +datahub-integrations-service/ +metadata-ingestion-modules/acryl-cloud diff --git a/README.md b/README.md index 79f85433fbc184..3ac0668918f708 100644 --- a/README.md +++ b/README.md @@ -22,13 +22,13 @@ HOSTED_DOCS_ONLY-->

-# DataHub: The Metadata Platform for the Modern Data Stack +# DataHub: The Data Discovery Platform for the Modern Data Stack ## Built with ❤️ by [Acryl Data](https://acryldata.io) and [LinkedIn](https://engineering.linkedin.com) [![Version](https://img.shields.io/github/v/release/datahub-project/datahub?include_prereleases)](https://github.com/datahub-project/datahub/releases/latest) [![PyPI version](https://badge.fury.io/py/acryl-datahub.svg)](https://badge.fury.io/py/acryl-datahub) [![build & test](https://github.com/datahub-project/datahub/workflows/build%20&%20test/badge.svg?branch=master&event=push)](https://github.com/datahub-project/datahub/actions?query=workflow%3A%22build+%26+test%22+branch%3Amaster+event%3Apush) -[![Docker Pulls](https://img.shields.io/docker/pulls/linkedin/datahub-gms.svg)](https://hub.docker.com/r/linkedin/datahub-gms) -[![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://slack.datahubproject.io) +[![Docker Pulls](https://img.shields.io/docker/pulls/acryldata/datahub-gms.svg)](https://hub.docker.com/r/acryldata/datahub-gms) +[![Slack](https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&style=social)](https://datahubproject.io/slack?utm_source=github&utm_medium=readme&utm_campaign=github_readme) [![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg)](https://github.com/datahub-project/datahub/blob/master/docs/CONTRIBUTING.md) [![GitHub commit activity](https://img.shields.io/github/commit-activity/m/datahub-project/datahub)](https://github.com/datahub-project/datahub/pulls?q=is%3Apr) [![License](https://img.shields.io/github/license/datahub-project/datahub)](https://github.com/datahub-project/datahub/blob/master/LICENSE) @@ -40,7 +40,7 @@ HOSTED_DOCS_ONLY--> --- [Quickstart](https://datahubproject.io/docs/quickstart) | -[Features](https://datahubproject.io/docs/features) | +[Features](https://datahubproject.io/docs/) | 
[Roadmap](https://feature-requests.datahubproject.io/roadmap) | [Adoption](#adoption) | [Demo](https://demo.datahubproject.io/) | @@ -61,7 +61,7 @@ HOSTED_DOCS_ONLY--> ## Introduction -DataHub is an open-source metadata platform for the modern data stack. Read about the architectures of different metadata systems and why DataHub excels [here](https://engineering.linkedin.com/blog/2020/datahub-popular-metadata-architectures-explained). Also read our +DataHub is an open-source data catalog for the modern data stack. Read about the architectures of different metadata systems and why DataHub excels [here](https://engineering.linkedin.com/blog/2020/datahub-popular-metadata-architectures-explained). Also read our [LinkedIn Engineering blog post](https://engineering.linkedin.com/blog/2019/data-hub), check out our [Strata presentation](https://speakerdeck.com/shirshanka/the-evolution-of-metadata-linkedins-journey-strata-nyc-2019) and watch our [Crunch Conference Talk](https://www.youtube.com/watch?v=OB-O0Y6OYDE). You should also visit [DataHub Architecture](docs/architecture/architecture.md) to get a better understanding of how DataHub is implemented. ## Features & Roadmap @@ -106,7 +106,7 @@ We welcome contributions from the community. Please refer to our [Contributing G ## Community -Join our [Slack workspace](https://slack.datahubproject.io) for discussions and important announcements. You can also find out more about our upcoming [town hall meetings](docs/townhalls.md) and view past recordings. +Join our [Slack workspace](https://datahubproject.io/slack?utm_source=github&utm_medium=readme&utm_campaign=github_readme) for discussions and important announcements. You can also find out more about our upcoming [town hall meetings](docs/townhalls.md) and view past recordings. ## Adoption @@ -118,6 +118,7 @@ Here are the companies that have officially adopted DataHub. 
Please feel free to - [Cabify](https://cabify.tech/) - [ClassDojo](https://www.classdojo.com/) - [Coursera](https://www.coursera.org/) +- [CVS Health](https://www.cvshealth.com/) - [DefinedCrowd](http://www.definedcrowd.com) - [DFDS](https://www.dfds.com/) - [Digital Turbine](https://www.digitalturbine.com/) @@ -137,11 +138,11 @@ Here are the companies that have officially adopted DataHub. Please feel free to - [Peloton](https://www.onepeloton.com) - [PITS Global Data Recovery Services](https://www.pitsdatarecovery.net/) - [Razer](https://www.razer.com) -- [Saxo Bank](https://www.home.saxo) - [Showroomprive](https://www.showroomprive.com/) - [SpotHero](https://spothero.com) - [Stash](https://www.stash.com) - [Shanghai HuaRui Bank](https://www.shrbank.com) +- [s7 Airlines](https://www.s7.ru/) - [ThoughtWorks](https://www.thoughtworks.com) - [TypeForm](http://typeform.com) - [Udemy](https://www.udemy.com/) diff --git a/SECURITY.md b/SECURITY.md index 3ca87b08d844df..0e301d37483739 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,4 +1,4 @@ -# Reporting security issues +# Reporting Security Issues If you think you have found a security vulnerability, please send a report to security@datahubproject.io. This address can be used for all of Acryl Data’s open source and commercial products (including but not limited to DataHub and Acryl Data). We can accept only vulnerability reports at this address. 
diff --git a/build.gradle b/build.gradle index cf55a59cfe6942..adb45705c0ebd0 100644 --- a/build.gradle +++ b/build.gradle @@ -1,45 +1,84 @@ buildscript { + ext.jdkVersionDefault = 17 + ext.javaClassVersionDefault = 11 + + def springModules = ['mae-consumer', 'mce-consumer', 'pe-consumer'] + + ext.jdkVersion = { p -> + // If Spring 6 is present, hard dependency on jdk17 + if (p.configurations.any { it.getDependencies().any{ + (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) + || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) + }} || springModules.contains(p.name)) { + return 17 + } else { + // otherwise we can use the preferred default which can be overridden with a property: -PjdkVersionDefault + return p.hasProperty('jdkVersionDefault') ? Integer.valueOf((String) p.getProperty('jdkVersionDefault')) : ext.jdkVersionDefault + } + } + + ext.javaClassVersion = { p -> + // If Spring 6 is present, hard dependency on jdk17 + if (p.configurations.any { it.getDependencies().any { + (it.getGroup().equals("org.springframework") && it.getVersion().startsWith("6.")) + || (it.getGroup().equals("org.springframework.boot") && it.getVersion().startsWith("3.") && !it.getName().equals("spring-boot-starter-test")) + }} || springModules.contains(p.name)) { + return 17 + } else { + // otherwise we can use the preferred default which can be overridden with a property: -PjavaClassVersionDefault + return p.hasProperty('javaClassVersionDefault') ? 
Integer.valueOf((String) p.getProperty('javaClassVersionDefault')) : ext.javaClassVersionDefault + } + } + ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.45.0' + ext.pegasusVersion = '29.57.0' ext.mavenVersion = '3.6.3' - ext.springVersion = '5.3.29' - ext.springBootVersion = '2.7.14' + ext.springVersion = '6.1.5' + ext.springBootVersion = '3.2.6' + ext.springKafkaVersion = '3.1.6' ext.openTelemetryVersion = '1.18.0' - ext.neo4jVersion = '4.4.9' + ext.neo4jVersion = '5.14.0' + ext.neo4jTestVersion = '5.14.0' + ext.neo4jApocVersion = '5.14.0' ext.testContainersVersion = '1.17.4' - ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x - ext.jacksonVersion = '2.15.2' - ext.jettyVersion = '9.4.46.v20220331' - ext.playVersion = '2.8.18' - ext.log4jVersion = '2.19.0' + ext.elasticsearchVersion = '2.11.1' // ES 7.10, Opensearch 1.x, 2.x + ext.jacksonVersion = '2.15.3' + ext.jettyVersion = '11.0.21' + ext.playVersion = '2.8.22' + ext.log4jVersion = '2.23.1' ext.slf4jVersion = '1.7.36' - ext.logbackClassic = '1.2.12' + ext.logbackClassic = '1.4.14' ext.hadoop3Version = '3.3.5' - ext.kafkaVersion = '2.3.0' - ext.hazelcastVersion = '5.3.1' + ext.kafkaVersion = '5.5.15' + ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '12.16.1' + ext.googleJavaFormatVersion = '1.18.1' + ext.openLineageVersion = '1.19.0' + ext.logbackClassicJava8 = '1.2.12' - ext.docker_registry = 'linkedin' + ext.docker_registry = 'acryldata' apply from: './repositories.gradle' buildscript.repositories.addAll(project.repositories) dependencies { classpath 'com.linkedin.pegasus:gradle-plugins:' + pegasusVersion - classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4' + classpath 'com.github.node-gradle:gradle-node-plugin:7.0.1' classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.2.0' classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion classpath 
"io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0" classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0" classpath "org.gradle.playframework:gradle-playframework:0.14" - classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1" + classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.2" } } plugins { - id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' - id 'com.github.johnrengelman.shadow' version '6.1.0' - id 'com.palantir.docker' version '0.35.0' + id 'com.gorylenko.gradle-git-properties' version '2.4.1' + id 'com.github.johnrengelman.shadow' version '8.1.1' apply false + id 'com.palantir.docker' version '0.35.0' apply false + id 'com.avast.gradle.docker-compose' version '0.17.6' + id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" } @@ -51,13 +90,14 @@ project.ext.spec = [ 'pegasus' : [ 'd2' : 'com.linkedin.pegasus:d2:' + pegasusVersion, 'data' : 'com.linkedin.pegasus:data:' + pegasusVersion, - 'dataAvro1_6' : 'com.linkedin.pegasus:data-avro-1_6:' + pegasusVersion, + 'dataAvro': 'com.linkedin.pegasus:data-avro:' + pegasusVersion, 'generator': 'com.linkedin.pegasus:generator:' + pegasusVersion, 'restliCommon' : 'com.linkedin.pegasus:restli-common:' + pegasusVersion, 'restliClient' : 'com.linkedin.pegasus:restli-client:' + pegasusVersion, 'restliDocgen' : 'com.linkedin.pegasus:restli-docgen:' + pegasusVersion, 'restliServer' : 'com.linkedin.pegasus:restli-server:' + pegasusVersion, 'restliSpringBridge': 'com.linkedin.pegasus:restli-spring-bridge:' + pegasusVersion, + 'restliTestUtils' : 'com.linkedin.pegasus:restli-client-testutils:' + pegasusVersion, ] ] ] @@ -69,22 +109,23 @@ project.ext.externalDependency = [ 'assertJ': 'org.assertj:assertj-core:3.11.1', 'avro': 'org.apache.avro:avro:1.11.3', 'avroCompiler': 'org.apache.avro:avro-compiler:1.11.3', - 
'awsGlueSchemaRegistrySerde': 'software.amazon.glue:schema-registry-serde:1.1.10', - 'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:1.1.1', - 'awsSecretsManagerJdbc': 'com.amazonaws.secretsmanager:aws-secretsmanager-jdbc:1.0.8', - 'awsPostgresIamAuth': 'software.amazon.jdbc:aws-advanced-jdbc-wrapper:1.0.0', + 'awsGlueSchemaRegistrySerde': 'software.amazon.glue:schema-registry-serde:1.1.17', + 'awsMskIamAuth': 'software.amazon.msk:aws-msk-iam-auth:2.0.3', + 'awsS3': 'software.amazon.awssdk:s3:2.26.21', + 'awsSecretsManagerJdbc': 'com.amazonaws.secretsmanager:aws-secretsmanager-jdbc:1.0.13', + 'awsPostgresIamAuth': 'software.amazon.jdbc:aws-advanced-jdbc-wrapper:1.0.2', 'awsRds':'software.amazon.awssdk:rds:2.18.24', - 'cacheApi' : 'javax.cache:cache-api:1.1.0', + 'cacheApi': 'javax.cache:cache-api:1.1.0', 'commonsCli': 'commons-cli:commons-cli:1.5.0', 'commonsIo': 'commons-io:commons-io:2.4', 'commonsLang': 'commons-lang:commons-lang:2.6', 'commonsText': 'org.apache.commons:commons-text:1.10.0', 'commonsCollections': 'commons-collections:commons-collections:3.2.2', - 'data' : 'com.linkedin.pegasus:data:' + pegasusVersion, + 'caffeine': 'com.github.ben-manes.caffeine:caffeine:3.1.8', 'datastaxOssNativeProtocol': 'com.datastax.oss:native-protocol:1.5.1', 'datastaxOssCore': 'com.datastax.oss:java-driver-core:4.14.1', 'datastaxOssQueryBuilder': 'com.datastax.oss:java-driver-query-builder:4.14.1', - 'dgraph4j' : 'io.dgraph:dgraph4j:21.03.1', + 'dgraph4j' : 'io.dgraph:dgraph4j:21.12.0', 'dropwizardMetricsCore': 'io.dropwizard.metrics:metrics-core:4.2.3', 'dropwizardMetricsJmx': 'io.dropwizard.metrics:metrics-jmx:4.2.3', 'ebean': 'io.ebean:ebean:' + ebeanVersion, @@ -93,11 +134,12 @@ project.ext.externalDependency = [ 'elasticSearchRest': 'org.opensearch.client:opensearch-rest-high-level-client:' + elasticsearchVersion, 'elasticSearchJava': 'org.opensearch.client:opensearch-java:2.6.0', 'findbugsAnnotations': 'com.google.code.findbugs:annotations:3.0.1', - 
'graphqlJava': 'com.graphql-java:graphql-java:19.5', - 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:19.1', + 'graphqlJava': 'com.graphql-java:graphql-java:21.3', + 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:21.0', 'gson': 'com.google.code.gson:gson:2.8.9', - 'guice': 'com.google.inject:guice:4.2.3', - 'guava': 'com.google.guava:guava:32.1.2-jre', + 'guice': 'com.google.inject:guice:7.0.0', + 'guicePlay': 'com.google.inject:guice:5.0.1', // Used for frontend while still on old Play version + 'guava': 'com.google.guava:guava:32.1.3-jre', 'h2': 'com.h2database:h2:2.2.224', 'hadoopCommon':'org.apache.hadoop:hadoop-common:2.7.2', 'hadoopMapreduceClient':'org.apache.hadoop:hadoop-mapreduce-client-core:2.7.2', @@ -107,8 +149,7 @@ project.ext.externalDependency = [ 'hazelcastSpring':"com.hazelcast:hazelcast-spring:$hazelcastVersion", 'hazelcastTest':"com.hazelcast:hazelcast:$hazelcastVersion:tests", 'hibernateCore': 'org.hibernate:hibernate-core:5.2.16.Final', - 'httpClient': 'org.apache.httpcomponents:httpclient:4.5.9', - 'httpAsyncClient': 'org.apache.httpcomponents:httpasyncclient:4.1.5', + 'httpClient': 'org.apache.httpcomponents.client5:httpclient5:5.3', 'iStackCommons': 'com.sun.istack:istack-commons-runtime:4.0.1', 'jacksonJDK8': "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:$jacksonVersion", 'jacksonDataPropertyFormat': "com.fasterxml.jackson.dataformat:jackson-dataformat-properties:$jacksonVersion", @@ -119,49 +160,58 @@ project.ext.externalDependency = [ 'javatuples': 'org.javatuples:javatuples:1.2', 'javaxInject' : 'javax.inject:javax.inject:1', 'javaxValidation' : 'javax.validation:validation-api:2.0.1.Final', - 'jerseyCore': 'org.glassfish.jersey.core:jersey-client:2.25.1', + 'jakartaValidation': 'jakarta.validation:jakarta.validation-api:3.1.0-M2', + 'jerseyCore': 'org.glassfish.jersey.core:jersey-client:2.41', 'jerseyGuava': 'org.glassfish.jersey.bundles.repackaged:jersey-guava:2.25.1', 'jettyJaas': 
"org.eclipse.jetty:jetty-jaas:$jettyVersion", 'jettyClient': "org.eclipse.jetty:jetty-client:$jettyVersion", 'jettison': 'org.codehaus.jettison:jettison:1.5.4', 'jgrapht': 'org.jgrapht:jgrapht-core:1.5.1', 'jna': 'net.java.dev.jna:jna:5.12.1', - 'jsonPatch': 'com.github.java-json-tools:json-patch:1.13', + 'jsonPatch': 'jakarta.json:jakarta.json-api:2.1.3', + 'jsonPathImpl': 'org.eclipse.parsson:parsson:1.1.6', 'jsonSimple': 'com.googlecode.json-simple:json-simple:1.1.1', 'jsonSmart': 'net.minidev:json-smart:2.4.9', - 'json': 'org.json:json:20230227', + 'json': 'org.json:json:20231013', + 'jsonSchemaValidator': 'com.github.java-json-tools:json-schema-validator:2.2.14', 'junit': 'junit:junit:4.13.2', 'junitJupiterApi': "org.junit.jupiter:junit-jupiter-api:$junitJupiterVersion", 'junitJupiterParams': "org.junit.jupiter:junit-jupiter-params:$junitJupiterVersion", 'junitJupiterEngine': "org.junit.jupiter:junit-jupiter-engine:$junitJupiterVersion", // avro-serde includes dependencies for `kafka-avro-serializer` `kafka-schema-registry-client` and `avro` - 'kafkaAvroSerde': 'io.confluent:kafka-streams-avro-serde:5.5.1', + 'kafkaAvroSerde': "io.confluent:kafka-streams-avro-serde:$kafkaVersion", 'kafkaAvroSerializer': 'io.confluent:kafka-avro-serializer:5.1.4', - 'kafkaClients': "org.apache.kafka:kafka-clients:$kafkaVersion", - 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.3', + 'kafkaClients': "org.apache.kafka:kafka-clients:$kafkaVersion-ccs", + 'snappy': 'org.xerial.snappy:snappy-java:1.1.10.4', 'logbackClassic': "ch.qos.logback:logback-classic:$logbackClassic", + 'logbackClassicJava8' : "ch.qos.logback:logback-classic:$logbackClassicJava8", 'slf4jApi': "org.slf4j:slf4j-api:$slf4jVersion", 'log4jCore': "org.apache.logging.log4j:log4j-core:$log4jVersion", 'log4jApi': "org.apache.logging.log4j:log4j-api:$log4jVersion", 'log4j12Api': "org.slf4j:log4j-over-slf4j:$slf4jVersion", 'log4j2Api': "org.apache.logging.log4j:log4j-to-slf4j:$log4jVersion", - 'lombok': 
'org.projectlombok:lombok:1.18.16', + 'lombok': 'org.projectlombok:lombok:1.18.30', 'mariadbConnector': 'org.mariadb.jdbc:mariadb-java-client:2.6.0', 'mavenArtifact': "org.apache.maven:maven-artifact:$mavenVersion", 'mixpanel': 'com.mixpanel:mixpanel-java:1.4.4', - 'mockito': 'org.mockito:mockito-core:3.0.0', - 'mockitoInline': 'org.mockito:mockito-inline:3.0.0', + 'mockito': 'org.mockito:mockito-core:4.11.0', + 'mockitoInline': 'org.mockito:mockito-inline:4.11.0', 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', - 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', - 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jVersion, + 'mysqlConnector': 'mysql:mysql-connector-java:8.0.28', + 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, + 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, + 'neo4jApocCore': 'org.neo4j.procedure:apoc-core:' + neo4jApocVersion, + 'neo4jApocCommon': 'org.neo4j.procedure:apoc-common:' + neo4jApocVersion, 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', 'parquet': 'org.apache.parquet:parquet-avro:1.12.3', + 'parquetHadoop': 'org.apache.parquet:parquet-hadoop:1.13.1', 'picocli': 'info.picocli:picocli:4.5.0', 'playCache': "com.typesafe.play:play-cache_2.12:$playVersion", + 'playCaffeineCache': "com.typesafe.play:play-caffeine-cache_2.12:$playVersion", 'playWs': 'com.typesafe.play:play-ahc-ws-standalone_2.12:2.1.10', 'playDocs': "com.typesafe.play:play-docs_2.12:$playVersion", 'playGuice': "com.typesafe.play:play-guice_2.12:$playVersion", @@ -172,21 +222,22 @@ project.ext.externalDependency = [ 'playFilters': 
"com.typesafe.play:filters-helpers_2.12:$playVersion", 'pac4j': 'org.pac4j:pac4j-oidc:4.5.7', 'playPac4j': 'org.pac4j:play-pac4j_2.12:9.0.2', - 'postgresql': 'org.postgresql:postgresql:42.3.8', + 'postgresql': 'org.postgresql:postgresql:42.3.9', 'protobuf': 'com.google.protobuf:protobuf-java:3.19.6', + 'grpcProtobuf': 'io.grpc:grpc-protobuf:1.53.0', 'rangerCommons': 'org.apache.ranger:ranger-plugins-common:2.3.0', 'reflections': 'org.reflections:reflections:0.9.9', 'resilience4j': 'io.github.resilience4j:resilience4j-retry:1.7.1', 'rythmEngine': 'org.rythmengine:rythm-engine:1.3.0', - 'servletApi': 'javax.servlet:javax.servlet-api:3.1.0', - 'shiroCore': 'org.apache.shiro:shiro-core:1.11.0', + 'servletApi': 'jakarta.servlet:jakarta.servlet-api:6.0.0', + 'shiroCore': 'org.apache.shiro:shiro-core:1.13.0', 'snakeYaml': 'org.yaml:snakeyaml:2.0', - 'sparkSql' : 'org.apache.spark:spark-sql_2.11:2.4.8', - 'sparkHive' : 'org.apache.spark:spark-hive_2.11:2.4.8', + 'sparkSql' : 'org.apache.spark:spark-sql_2.12:3.0.3', + 'sparkHive' : 'org.apache.spark:spark-hive_2.12:3.0.3', 'springBeans': "org.springframework:spring-beans:$springVersion", 'springContext': "org.springframework:spring-context:$springVersion", 'springCore': "org.springframework:spring-core:$springVersion", - 'springDocUI': 'org.springdoc:springdoc-openapi-ui:1.6.14', + 'springDocUI': 'org.springdoc:springdoc-openapi-starter-webmvc-ui:2.3.0', 'springJdbc': "org.springframework:spring-jdbc:$springVersion", 'springWeb': "org.springframework:spring-web:$springVersion", 'springWebMVC': "org.springframework:spring-webmvc:$springVersion", @@ -197,11 +248,13 @@ project.ext.externalDependency = [ 'springBootStarterJetty': "org.springframework.boot:spring-boot-starter-jetty:$springBootVersion", 'springBootStarterCache': "org.springframework.boot:spring-boot-starter-cache:$springBootVersion", 'springBootStarterValidation': "org.springframework.boot:spring-boot-starter-validation:$springBootVersion", - 'springKafka': 
'org.springframework.kafka:spring-kafka:2.8.11', + 'springKafka': "org.springframework.kafka:spring-kafka:$springKafkaVersion", 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", + 'springRetry': "org.springframework.retry:spring-retry:2.0.6", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', - 'testngJava8': 'org.testng:testng:7.5.1', + 'swaggerCore': 'io.swagger.core.v3:swagger-core:2.2.7', + 'springBootAutoconfigureJdk11': 'org.springframework.boot:spring-boot-autoconfigure:2.7.18', 'testng': 'org.testng:testng:7.8.0', 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, @@ -212,20 +265,87 @@ project.ext.externalDependency = [ 'testContainersOpenSearch': 'org.opensearch:opensearch-testcontainers:2.0.0', 'typesafeConfig':'com.typesafe:config:1.4.1', 'wiremock':'com.github.tomakehurst:wiremock:2.10.0', - 'zookeeper': 'org.apache.zookeeper:zookeeper:3.4.14', + 'zookeeper': 'org.apache.zookeeper:zookeeper:3.7.2', 'wire': 'com.squareup.wire:wire-compiler:3.7.1', 'charle': 'com.charleskorn.kaml:kaml:0.53.0', 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', - 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' - + 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0', + 'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2', + 'jakartaAnnotationApi': 'jakarta.annotation:jakarta.annotation-api:3.0.0', + 'classGraph': 'io.github.classgraph:classgraph:4.8.172', ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' - apply plugin: 'checkstyle' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' + + tasks.withType(Test).configureEach { task -> if (task.project.name != "metadata-io") { + // https://docs.gradle.org/current/userguide/performance.html + maxParallelForks = 
Runtime.runtime.availableProcessors().intdiv(2) ?: 1 + + if (project.configurations.getByName("testImplementation").getDependencies() + .any { it.getName().contains("testng") }) { + useTestNG() + } + } + } + + /** + * If making changes to this section also see the sections for pegasus below + * which use project.plugins.hasPlugin('pegasus') + **/ + if (!project.plugins.hasPlugin('pegasus') && (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application'))) { + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + // Puts parameter names into compiled class files, necessary for Spring 6 + options.compilerArgs.add("-parameters") + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + } + + // not duplicated, need to set this outside and inside afterEvaluate + afterEvaluate { + /** + * If making changes to this section also see the sections for pegasus below + * which use project.plugins.hasPlugin('pegasus') + **/ + if (!project.plugins.hasPlugin('pegasus') && (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application'))) { + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + } + } } configure(subprojects.findAll {! 
it.name.startsWith('spark-lineage')}) { @@ -246,6 +366,7 @@ subprojects { apply plugin: 'maven-publish' apply plugin: 'com.gorylenko.gradle-git-properties' + apply plugin: 'com.diffplug.spotless' gitProperties { keys = ['git.commit.id','git.commit.id.describe','git.commit.time'] @@ -256,11 +377,17 @@ subprojects { failOnNoGitDirectory = false } - plugins.withType(JavaPlugin) { + plugins.withType(JavaPlugin).configureEach { + if (project.name == 'datahub-web-react') { + return + } + dependencies { + implementation externalDependency.annotationApi constraints { - implementation('io.netty:netty-all:4.1.86.Final') - implementation('org.apache.commons:commons-compress:1.21') + implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") + implementation('io.netty:netty-all:4.1.100.Final') + implementation('org.apache.commons:commons-compress:1.26.0') implementation('org.apache.velocity:velocity-engine-core:2.3') implementation('org.hibernate:hibernate-validator:6.0.20.Final') implementation("com.fasterxml.jackson.core:jackson-databind:$jacksonVersion") @@ -268,30 +395,67 @@ subprojects { } } - checkstyle { - configDirectory = file("${project.rootDir}/gradle/checkstyle") - sourceSets = [ getProject().sourceSets.main, getProject().sourceSets.test ] - toolVersion = "8.0" - maxWarnings = 0 - ignoreFailures = false + spotless { + java { + googleJavaFormat() + target project.fileTree(project.projectDir) { + include 'src/**/*.java' + exclude 'src/**/resources/' + exclude 'src/**/generated/' + exclude 'src/**/mainGeneratedDataTemplate/' + exclude 'src/**/mainGeneratedRest/' + exclude 'src/renamed/avro/' + exclude 'src/test/sample-test-plugins/' + } + } } - } - tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(11) - } - } - tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) + if 
(project.plugins.hasPlugin('pegasus')) { + dependencies { + dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 + restClientCompile spec.product.pegasus.restliClient + } + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + // Puts parameter names into compiled class files, necessary for Spring 6 + options.compilerArgs.add("-parameters") + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } } - // https://docs.gradle.org/current/userguide/performance.html - maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 - if (project.configurations.getByName("testImplementation").getDependencies() - .any{ it.getName().contains("testng") }) { - useTestNG() + afterEvaluate { + def spotlessJavaTask = tasks.findByName('spotlessJava') + def processTask = tasks.findByName('processResources') + if (processTask != null) { + spotlessJavaTask.dependsOn processTask + } + def compileJavaTask = tasks.findByName('compileJava') + if (compileJavaTask != null) { + spotlessJavaTask.dependsOn compileJavaTask + } + // TODO - Do not run this in CI. How? 
+ // tasks.withType(JavaCompile) { + // finalizedBy(tasks.findByName('spotlessApply')) + // } } } @@ -299,8 +463,19 @@ subprojects { if (project.plugins.hasPlugin('pegasus')) { dependencies { dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 restClientCompile spec.product.pegasus.restliClient } + + compileJava { + options.release = javaClassVersion(project) + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion(project)) + } + } } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1f9d30d520171b..0101d1b717205a 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,9 +1,11 @@ -apply plugin: 'java' - buildscript { apply from: '../repositories.gradle' } +plugins { + id 'java' +} + dependencies { /** * Forked version of abandoned repository: https://github.com/fge/json-schema-avro @@ -12,15 +14,16 @@ dependencies { * Other companies are also separately maintaining forks (like: https://github.com/java-json-tools/json-schema-avro). 
* We have built several customizations on top of it for various bug fixes, especially around union scheams */ - implementation('io.acryl:json-schema-avro:0.2.2') { + implementation('io.acryl:json-schema-avro:0.2.3') { exclude group: 'com.fasterxml.jackson.core', module: 'jackson-databind' exclude group: 'com.google.guava', module: 'guava' } + implementation 'com.github.java-json-tools:json-schema-validator:2.2.14' implementation 'com.google.guava:guava:32.1.2-jre' implementation 'com.fasterxml.jackson.core:jackson-databind:2.13.5' implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' implementation 'commons-io:commons-io:2.11.0' - compileOnly 'org.projectlombok:lombok:1.18.14' - annotationProcessor 'org.projectlombok:lombok:1.18.14' -} \ No newline at end of file + compileOnly 'org.projectlombok:lombok:1.18.30' + annotationProcessor 'org.projectlombok:lombok:1.18.30' +} diff --git a/buildSrc/src/main/java/com/linkedin/metadata/aspect/plugins/config b/buildSrc/src/main/java/com/linkedin/metadata/aspect/plugins/config new file mode 120000 index 00000000000000..087629f8ac1df2 --- /dev/null +++ b/buildSrc/src/main/java/com/linkedin/metadata/aspect/plugins/config @@ -0,0 +1 @@ +../../../../../../../../../entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config \ No newline at end of file diff --git a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java index 25bf239ab835b7..1c9dfd46866102 100644 --- a/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java +++ b/buildSrc/src/main/java/io/datahubproject/GenerateJsonSchemaTask.java @@ -183,6 +183,7 @@ private void generateSchema(final File file) { final String fileBaseName; try { final JsonNode schema = JsonLoader.fromFile(file); + final JsonNode result = buildResult(schema.toString()); String prettySchema = JacksonUtils.prettyPrint(result); Path absolutePath = 
file.getAbsoluteFile().toPath(); @@ -195,11 +196,21 @@ private void generateSchema(final File file) { } else { fileBaseName = getBaseName(file.getName()); } - Files.write(Paths.get(jsonDirectory + sep + fileBaseName + ".json"), + + final String targetName; + if (schema.has("Aspect") && schema.get("Aspect").has("name") && + !schema.get("Aspect").get("name").asText().equalsIgnoreCase(fileBaseName)) { + targetName = OpenApiEntities.toUpperFirst(schema.get("Aspect").get("name").asText()); + prettySchema = prettySchema.replaceAll(fileBaseName, targetName); + } else { + targetName = fileBaseName; + } + + Files.write(Paths.get(jsonDirectory + sep + targetName + ".json"), prettySchema.getBytes(StandardCharsets.UTF_8), StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); if (schema.has("Aspect")) { - aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + getBaseName(file.getName()))); + aspectType.add(NODE_FACTORY.objectNode().put("$ref", "#/definitions/" + targetName)); } } catch (IOException | ProcessingException e) { throw new RuntimeException(e); diff --git a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java index 888c4a0e999311..01d61b6119b0a2 100644 --- a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java +++ b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java @@ -6,6 +6,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.models.registry.config.Entities; import com.linkedin.metadata.models.registry.config.Entity; @@ -58,8 +59,13 @@ public class OpenApiEntities { .add("notebookInfo").add("editableNotebookProperties") .add("dataProductProperties") .add("institutionalMemory") 
+ .add("forms").add("formInfo").add("dynamicFormAssignment") + .add("businessAttributeInfo") .build(); + private final static ImmutableSet ENTITY_EXCLUSIONS = ImmutableSet.builder() + .add("structuredProperty") + .build(); public OpenApiEntities(JsonNodeFactory NODE_FACTORY) { this.NODE_FACTORY = NODE_FACTORY; @@ -117,14 +123,27 @@ public ObjectNode entityExtension(List nodesList, ObjectNode schemas return componentsNode; } - private static String toUpperFirst(String s) { - return s.substring(0, 1).toUpperCase() + s.substring(1); + /** + * Convert the pdl model names to desired class names. Upper case first letter unless the 3rd character is upper case. + * i.e. mlModel -> MLModel + * dataset -> Dataset + * dataProduct -> DataProduct + * @param s input string + * @return class name + */ + public static String toUpperFirst(String s) { + if (s.length() > 2 && s.substring(2, 3).equals(s.substring(2, 3).toUpperCase())) { + return s.substring(0, 2).toUpperCase() + s.substring(2); + } else { + return s.substring(0, 1).toUpperCase() + s.substring(1); + } } private Set withEntitySchema(ObjectNode schemasNode, Set definitions) { return entityMap.values().stream() // Make sure the primary key is defined .filter(entity -> definitions.contains(toUpperFirst(entity.getKeyAspect()))) + .filter(entity -> !ENTITY_EXCLUSIONS.contains(entity.getName())) .map(entity -> { final String upperName = toUpperFirst(entity.getName()); @@ -442,6 +461,22 @@ private ObjectNode buildListEntityPath(Entity entity, Set parameterDefin ObjectNode postMethod = NODE_FACTORY.objectNode() .put("summary", "Create " + upperFirst) .put("operationId", String.format("create", upperFirst)); + ArrayNode postParameters = NODE_FACTORY.arrayNode(); + postMethod.set("parameters", postParameters); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createIfNotExists") + .put("description", "Create the aspect if it does not already exist.") + .set("schema", NODE_FACTORY.objectNode() + 
.put("type", "boolean") + .put("default", false))); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createEntityIfNotExists") + .put("description", "Create the entity ONLY if it does not already exist. Fails in case when the entity exists.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); postMethod.set("requestBody", NODE_FACTORY.objectNode() .put("description", "Create " + entity.getName() + " entities.") .put("required", true) @@ -547,7 +582,7 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { ObjectNode getMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Get %s for %s.", aspect, entity.getName())) - .put("operationId", String.format("get%s", upperFirstAspect, upperFirstEntity)); + .put("operationId", String.format("get%s", upperFirstAspect)); getMethod.set("tags", tagsNode); ArrayNode singlePathParametersNode = NODE_FACTORY.arrayNode(); getMethod.set("parameters", singlePathParametersNode); @@ -575,13 +610,13 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { .set("application/json", NODE_FACTORY.objectNode()))); ObjectNode headMethod = NODE_FACTORY.objectNode() .put("summary", String.format("%s on %s existence.", aspect, upperFirstEntity)) - .put("operationId", String.format("head%s", upperFirstAspect, upperFirstEntity)) + .put("operationId", String.format("head%s", upperFirstAspect)) .set("responses", headResponses); headMethod.set("tags", tagsNode); ObjectNode deleteMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Delete %s on entity %s", aspect, upperFirstEntity)) - .put("operationId", String.format("delete%s", upperFirstAspect, upperFirstEntity)) + .put("operationId", String.format("delete%s", upperFirstAspect)) .set("responses", NODE_FACTORY.objectNode() .set("200", NODE_FACTORY.objectNode() .put("description", String.format("Delete %s on %s entity.", aspect, 
upperFirstEntity)) @@ -591,7 +626,23 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { ObjectNode postMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Create aspect %s on %s ", aspect, upperFirstEntity)) - .put("operationId", String.format("create%s", upperFirstAspect, upperFirstEntity)); + .put("operationId", String.format("create%s", upperFirstAspect)); + ArrayNode postParameters = NODE_FACTORY.arrayNode(); + postMethod.set("parameters", postParameters); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createIfNotExists") + .put("description", "Create the aspect if it does not already exist.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createEntityIfNotExists") + .put("description", "Create the entity if it does not already exist. Fails in case when the entity exists.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); postMethod.set("requestBody", NODE_FACTORY.objectNode() .put("description", String.format("Create aspect %s on %s entity.", aspect, upperFirstEntity)) .put("required", true).set("content", NODE_FACTORY.objectNode() diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index fe04c3629fe582..32dfba00d47dbf 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -1,34 +1,39 @@ package auth; -import auth.sso.SsoConfigs; +import static auth.AuthUtils.*; +import static utils.ConfigUtil.*; + import auth.sso.SsoManager; -import auth.sso.oidc.OidcConfigs; -import auth.sso.oidc.OidcProvider; import client.AuthServiceClient; import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; +import 
com.datahub.plugins.auth.authorization.Authorizer; import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.google.inject.Singleton; +import com.google.inject.name.Named; import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.metadata.models.registry.EmptyEntityRegistry; import com.linkedin.metadata.restli.DefaultRestliClientFactory; import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.util.Configuration; import config.ConfigurationProvider; import controllers.SsoCallbackController; - import java.nio.charset.StandardCharsets; -import java.util.ArrayList; import java.util.Collections; -import java.util.List; +import io.datahubproject.metadata.context.ActorContext; +import io.datahubproject.metadata.context.AuthorizerContext; +import io.datahubproject.metadata.context.EntityRegistryContext; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.SearchContext; +import lombok.extern.slf4j.Slf4j; import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; -import org.pac4j.core.client.Client; -import org.pac4j.core.client.Clients; import org.pac4j.core.config.Config; import org.pac4j.core.context.session.SessionStore; import org.pac4j.play.LogoutController; @@ -42,205 +47,252 @@ import play.cache.SyncCacheApi; import utils.ConfigUtil; -import static auth.AuthUtils.*; -import static auth.sso.oidc.OidcConfigs.*; -import static utils.ConfigUtil.*; +/** Responsible for configuring, validating, and providing authentication related components. */ +@Slf4j +public class AuthModule extends AbstractModule { + /** + * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. 
This configuration + * value provides a stable encryption base from which to derive the encryption key. + * + *

We hash this value (SHA256), then take the first 16 bytes as the AES key. + */ + private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; -/** - * Responsible for configuring, validating, and providing authentication related components. - */ -public class AuthModule extends AbstractModule { + private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; + private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; + private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + private static final String ENTITY_CLIENT_RESTLI_GET_BATCH_SIZE = "entityClient.restli.get.batchSize"; + private static final String ENTITY_CLIENT_RESTLI_GET_BATCH_CONCURRENCY = "entityClient.restli.get.batchConcurrency"; + private static final String GET_SSO_SETTINGS_ENDPOINT = "auth/getSsoSettings"; + private final com.typesafe.config.Config _configs; + + public AuthModule(final Environment environment, final com.typesafe.config.Config configs) { + _configs = configs; + } + + @Override + protected void configure() { /** - * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration - * value provides a stable encryption base from which to derive the encryption key. - * - * We hash this value (SHA256), then take the first 16 bytes as the AES key. + * In Pac4J, you are given the option to store the profiles of authenticated users in either (i) + * PlayCacheSessionStore - saves your data in the Play cache or (ii) PlayCookieSessionStore + * saves your data in the Play session cookie However there is problem + * (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j + * profile in cookie. Whenever the profile returned by Pac4j is greater than 4096 characters, + * the response will be rejected by the browser. 
Default to PlayCacheCookieStore so that + * datahub-frontend container remains as a stateless service */ - private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; - private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; - private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; - private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - private final com.typesafe.config.Config _configs; - - public AuthModule(final Environment environment, final com.typesafe.config.Config configs) { - _configs = configs; + if (sessionStoreProvider.equals("PlayCacheSessionStore")) { + final PlayCacheSessionStore playCacheSessionStore = + new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); + bind(SessionStore.class).toInstance(playCacheSessionStore); + bind(PlaySessionStore.class).toInstance(playCacheSessionStore); + } else { + PlayCookieSessionStore playCacheCookieStore; + try { + // To generate a valid encryption key from an input value, we first + // hash the input to generate a fixed-length string. Then, we convert + // it to hex and slice the first 16 bytes, because AES key length must strictly + // have a specific length. 
+ final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); + final String aesKeyHash = + DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); + final String aesEncryptionKey = aesKeyHash.substring(0, 16); + playCacheCookieStore = + new PlayCookieSessionStore(new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); + } catch (Exception e) { + throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); + } + bind(SessionStore.class).toInstance(playCacheCookieStore); + bind(PlaySessionStore.class).toInstance(playCacheCookieStore); } - @Override - protected void configure() { - /** - * In Pac4J, you are given the option to store the profiles of authenticated users in either - * (i) PlayCacheSessionStore - saves your data in the Play cache or - * (ii) PlayCookieSessionStore saves your data in the Play session cookie - * However there is problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j profile in cookie. - * Whenever the profile returned by Pac4j is greater than 4096 characters, the response will be rejected by the browser. - * Default to PlayCacheCookieStore so that datahub-frontend container remains as a stateless service - */ - String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - - if (sessionStoreProvider.equals("PlayCacheSessionStore")) { - final PlayCacheSessionStore playCacheSessionStore = new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); - bind(SessionStore.class).toInstance(playCacheSessionStore); - bind(PlaySessionStore.class).toInstance(playCacheSessionStore); - } else { - PlayCookieSessionStore playCacheCookieStore; - try { - // To generate a valid encryption key from an input value, we first - // hash the input to generate a fixed-length string. Then, we convert - // it to hex and slice the first 16 bytes, because AES key length must strictly - // have a specific length. 
- final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); - final String aesKeyHash = DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); - final String aesEncryptionKey = aesKeyHash.substring(0, 16); - playCacheCookieStore = new PlayCookieSessionStore( - new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); - } catch (Exception e) { - throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); - } - bind(SessionStore.class).toInstance(playCacheCookieStore); - bind(PlaySessionStore.class).toInstance(playCacheCookieStore); - } - - try { - bind(SsoCallbackController.class).toConstructor(SsoCallbackController.class.getConstructor( - SsoManager.class, - Authentication.class, - SystemEntityClient.class, - AuthServiceClient.class, - com.typesafe.config.Config.class)); - } catch (NoSuchMethodException | SecurityException e) { - throw new RuntimeException("Failed to bind to SsoCallbackController. Cannot find constructor", e); - } - // logout - final LogoutController logoutController = new LogoutController(); - logoutController.setDefaultUrl("/"); - bind(LogoutController.class).toInstance(logoutController); + try { + bind(SsoCallbackController.class) + .toConstructor( + SsoCallbackController.class.getConstructor( + SsoManager.class, + OperationContext.class, + SystemEntityClient.class, + AuthServiceClient.class, + org.pac4j.core.config.Config.class, + com.typesafe.config.Config.class)); + } catch (NoSuchMethodException | SecurityException e) { + throw new RuntimeException( + "Failed to bind to SsoCallbackController. 
Cannot find constructor", e); } + // logout + final LogoutController logoutController = new LogoutController(); + logoutController.setDefaultUrl("/"); + bind(LogoutController.class).toInstance(logoutController); + } - @Provides @Singleton - protected Config provideConfig(SsoManager ssoManager) { - if (ssoManager.isSsoEnabled()) { - final Clients clients = new Clients(); - final List clientList = new ArrayList<>(); - clientList.add(ssoManager.getSsoProvider().client()); - clients.setClients(clientList); - final Config config = new Config(clients); - config.setHttpActionAdapter(new PlayHttpActionAdapter()); - return config; - } - return new Config(); - } + @Provides + @Singleton + protected Config provideConfig() { + Config config = new Config(); + config.setHttpActionAdapter(new PlayHttpActionAdapter()); + return config; + } - @Provides @Singleton - protected SsoManager provideSsoManager() { - SsoManager manager = new SsoManager(); - // Seed the SSO manager with a default SSO provider. - if (isSsoEnabled(_configs)) { - SsoConfigs ssoConfigs = new SsoConfigs(_configs); - if (ssoConfigs.isOidcEnabled()) { - // Register OIDC Provider, add to list of managers. - OidcConfigs oidcConfigs = new OidcConfigs(_configs); - OidcProvider oidcProvider = new OidcProvider(oidcConfigs); - // Set the default SSO provider to this OIDC client. - manager.setSsoProvider(oidcProvider); - } - } - return manager; - } + @Provides + @Singleton + protected SsoManager provideSsoManager( + Authentication systemAuthentication, CloseableHttpClient httpClient) { + SsoManager manager = + new SsoManager( + _configs, systemAuthentication, getSsoSettingsRequestUrl(_configs), httpClient); + manager.initializeSsoProvider(); + return manager; + } - @Provides - @Singleton - protected Authentication provideSystemAuthentication() { - // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service. 
- String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); - String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); - final Actor systemActor = - new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. - return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret), - Collections.emptyMap()); - } + @Provides + @Singleton + protected Authentication provideSystemAuthentication() { + // Returns an instance of Authentication used to authenticate system initiated calls to Metadata + // Service. + String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); + String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); + final Actor systemActor = + new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. + return new Authentication( + systemActor, + String.format("Basic %s:%s", systemClientId, systemSecret), + Collections.emptyMap()); + } - @Provides - @Singleton - protected ConfigurationProvider provideConfigurationProvider() { - AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ConfigurationProvider.class); - return context.getBean(ConfigurationProvider.class); - } + @Provides + @Singleton + @Named("systemOperationContext") + protected OperationContext provideOperationContext( + final Authentication systemAuthentication, + final ConfigurationProvider configurationProvider) { + ActorContext systemActorContext = + ActorContext.builder() + .systemAuth(true) + .authentication(systemAuthentication) + .build(); + OperationContextConfig systemConfig = OperationContextConfig.builder() + .viewAuthorizationConfiguration(configurationProvider.getAuthorization().getView()) + .allowSystemAuthentication(true) + .build(); - @Provides - @Singleton - protected SystemEntityClient provideEntityClient(final Authentication systemAuthentication, - final ConfigurationProvider 
configurationProvider) { - return new SystemRestliEntityClient(buildRestliClient(), - new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), - _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); - } + return OperationContext.builder() + .operationContextConfig(systemConfig) + .systemActorContext(systemActorContext) + .searchContext(SearchContext.EMPTY) + .entityRegistryContext(EntityRegistryContext.builder().build(EmptyEntityRegistry.EMPTY)) + // Authorizer.EMPTY doesn't actually apply to system auth + .authorizerContext(AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build()) + .build(systemAuthentication); + } - @Provides - @Singleton - protected CloseableHttpClient provideHttpClient() { - return HttpClients.createDefault(); - } + @Provides + @Singleton + protected ConfigurationProvider provideConfigurationProvider() { + AnnotationConfigApplicationContext context = + new AnnotationConfigApplicationContext(ConfigurationProvider.class); + return context.getBean(ConfigurationProvider.class); + } - @Provides - @Singleton - protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) { - // Init a GMS auth client - final String metadataServiceHost = - _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) - : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); - - final int metadataServicePort = - _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) - : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - - final Boolean metadataServiceUseSsl = - _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? 
_configs.getBoolean( - METADATA_SERVICE_USE_SSL_CONFIG_PATH) - : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); - - return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, - systemAuthentication, httpClient); - } + @Provides + @Singleton + protected SystemEntityClient provideEntityClient( + @Named("systemOperationContext") final OperationContext systemOperationContext, + final ConfigurationProvider configurationProvider) { + + return new SystemRestliEntityClient( + buildRestliClient(), + new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), + _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), + configurationProvider.getCache().getClient().getEntityClient(), + Math.max(1, _configs.getInt(ENTITY_CLIENT_RESTLI_GET_BATCH_SIZE)), + Math.max(1, _configs.getInt(ENTITY_CLIENT_RESTLI_GET_BATCH_CONCURRENCY))); + } + + @Provides + @Singleton + protected AuthServiceClient provideAuthClient( + Authentication systemAuthentication, CloseableHttpClient httpClient) { + // Init a GMS auth client + final String metadataServiceHost = getMetadataServiceHost(_configs); + + final int metadataServicePort = getMetadataServicePort(_configs); + + final boolean metadataServiceUseSsl = doesMetadataServiceUseSsl(_configs); + + return new AuthServiceClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + systemAuthentication, + httpClient); + } + + @Provides + @Singleton + protected CloseableHttpClient provideHttpClient() { + return HttpClients.createDefault(); + } - private com.linkedin.restli.client.Client buildRestliClient() { - final String metadataServiceHost = utils.ConfigUtil.getString( + private com.linkedin.restli.client.Client buildRestliClient() { + final String metadataServiceHost = + utils.ConfigUtil.getString( _configs, METADATA_SERVICE_HOST_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = 
utils.ConfigUtil.getInt( + final int metadataServicePort = + utils.ConfigUtil.getInt( _configs, utils.ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = utils.ConfigUtil.getBoolean( + final boolean metadataServiceUseSsl = + utils.ConfigUtil.getBoolean( _configs, utils.ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); - final String metadataServiceSslProtocol = utils.ConfigUtil.getString( + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); + final String metadataServiceSslProtocol = + utils.ConfigUtil.getString( _configs, utils.ConfigUtil.METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL - ); - return DefaultRestliClientFactory.getRestLiClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, metadataServiceSslProtocol); - } + ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL); + return DefaultRestliClientFactory.getRestLiClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + metadataServiceSslProtocol); + } - protected boolean isSsoEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); - } + protected boolean doesMetadataServiceUseSsl(com.typesafe.config.Config configs) { + return configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + ? configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + : Boolean.parseBoolean( + Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + } - protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. 
- return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH))); - } -} + protected String getMetadataServiceHost(com.typesafe.config.Config configs) { + return configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) + ? configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) + : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); + } + protected Integer getMetadataServicePort(com.typesafe.config.Config configs) { + return configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) + ? configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) + : Integer.parseInt( + Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); + } + + protected String getSsoSettingsRequestUrl(com.typesafe.config.Config configs) { + final String protocol = doesMetadataServiceUseSsl(configs) ? "https" : "http"; + final String metadataServiceHost = getMetadataServiceHost(configs); + final Integer metadataServicePort = getMetadataServicePort(configs); + + return String.format( + "%s://%s:%s/%s", + protocol, metadataServiceHost, metadataServicePort, GET_SSO_SETTINGS_ENDPOINT); + } +} diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 386eee725c83d0..51bb784c61b3b1 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -1,137 +1,159 @@ package auth; import com.linkedin.common.urn.CorpuserUrn; -import lombok.extern.slf4j.Slf4j; -import play.mvc.Http; - -import javax.annotation.Nonnull; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import play.mvc.Http; @Slf4j public class AuthUtils { - /** - * The config path that determines whether Metadata Service Authentication is enabled. 
- * - * When enabled, the frontend server will proxy requests to the Metadata Service without requiring them to have a valid - * frontend-issued Session Cookie. This effectively means delegating the act of authentication to the Metadata Service. It - * is critical that if Metadata Service authentication is enabled at the frontend service layer, it is also enabled in the - * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. - * - * When disabled, the frontend server will require that all requests have a valid Session Cookie associated with them. Otherwise, - * requests will be denied with an Unauthorized error. - */ - public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = "metadataService.auth.enabled"; - - /** - * The attribute inside session cookie representing a GMS-issued access token - */ - public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; - - /** - * An ID used to identify system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; - - /** - * An Secret used to authenticate system callers that are internal to DataHub. Provided via configuration. 
- */ - public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; - - /** - * Cookie name for redirect url that is manually separated from the session to reduce size - */ - public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; - - public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); - - public static final String LOGIN_ROUTE = "/login"; - public static final String USER_NAME = "username"; - public static final String PASSWORD = "password"; - public static final String ACTOR = "actor"; - public static final String ACCESS_TOKEN = "token"; - public static final String FULL_NAME = "fullName"; - public static final String EMAIL = "email"; - public static final String TITLE = "title"; - public static final String INVITE_TOKEN = "inviteToken"; - public static final String RESET_TOKEN = "resetToken"; - - /** - * Determines whether the inbound request should be forward to downstream Metadata Service. Today, this simply - * checks for the presence of an "Authorization" header or the presence of a valid session cookie issued - * by the frontend. - * - * Note that this method DOES NOT actually verify the authentication token of an inbound request. That will - * be handled by the downstream Metadata Service. Until then, the request should be treated as UNAUTHENTICATED. - * - * Returns true if the request is eligible to be forwarded to GMS, false otherwise. - */ - public static boolean isEligibleForForwarding(Http.Request req) { - return hasValidSessionCookie(req) || hasAuthHeader(req); + /** + * The config path that determines whether Metadata Service Authentication is enabled. + * + *

When enabled, the frontend server will proxy requests to the Metadata Service without + * requiring them to have a valid frontend-issued Session Cookie. This effectively means + * delegating the act of authentication to the Metadata Service. It is critical that if Metadata + * Service authentication is enabled at the frontend service layer, it is also enabled in the + * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. + * + *

When disabled, the frontend server will require that all requests have a valid Session + * Cookie associated with them. Otherwise, requests will be denied with an Unauthorized error. + */ + public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = + "metadataService.auth.enabled"; + + /** The attribute inside session cookie representing a GMS-issued access token */ + public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; + + /** + * An ID used to identify system callers that are internal to DataHub. Provided via configuration. + */ + public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; + + /** + * An Secret used to authenticate system callers that are internal to DataHub. Provided via + * configuration. + */ + public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; + + /** Cookie name for redirect url that is manually separated from the session to reduce size */ + public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; + + public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); + + public static final String LOGIN_ROUTE = "/login"; + public static final String USER_NAME = "username"; + public static final String PASSWORD = "password"; + public static final String ACTOR = "actor"; + public static final String ACCESS_TOKEN = "token"; + public static final String FULL_NAME = "fullName"; + public static final String EMAIL = "email"; + public static final String TITLE = "title"; + public static final String INVITE_TOKEN = "inviteToken"; + public static final String RESET_TOKEN = "resetToken"; + public static final String BASE_URL = "baseUrl"; + public static final String OIDC_ENABLED = "oidcEnabled"; + public static final String CLIENT_ID = "clientId"; + public static final String CLIENT_SECRET = "clientSecret"; + public static final String DISCOVERY_URI = "discoveryUri"; + + public static final String USER_NAME_CLAIM = "userNameClaim"; + public static 
final String USER_NAME_CLAIM_REGEX = "userNameClaimRegex"; + public static final String SCOPE = "scope"; + public static final String CLIENT_NAME = "clientName"; + public static final String CLIENT_AUTHENTICATION_METHOD = "clientAuthenticationMethod"; + public static final String JIT_PROVISIONING_ENABLED = "jitProvisioningEnabled"; + public static final String PRE_PROVISIONING_REQUIRED = "preProvisioningRequired"; + public static final String EXTRACT_GROUPS_ENABLED = "extractGroupsEnabled"; + public static final String GROUPS_CLAIM = "groupsClaim"; + public static final String RESPONSE_TYPE = "responseType"; + public static final String RESPONSE_MODE = "responseMode"; + public static final String USE_NONCE = "useNonce"; + public static final String READ_TIMEOUT = "readTimeout"; + public static final String EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "extractJwtAccessTokenClaims"; + // Retained for backwards compatibility + public static final String PREFERRED_JWS_ALGORITHM = "preferredJwsAlgorithm"; + public static final String PREFERRED_JWS_ALGORITHM_2 = "preferredJwsAlgorithm2"; + + /** + * Determines whether the inbound request should be forward to downstream Metadata Service. Today, + * this simply checks for the presence of an "Authorization" header or the presence of a valid + * session cookie issued by the frontend. + * + *

Note that this method DOES NOT actually verify the authentication token of an inbound + * request. That will be handled by the downstream Metadata Service. Until then, the request + * should be treated as UNAUTHENTICATED. + * + *

Returns true if the request is eligible to be forwarded to GMS, false otherwise. + */ + public static boolean isEligibleForForwarding(Http.Request req) { + return hasValidSessionCookie(req) || hasAuthHeader(req); + } + + /** + * Returns true if a request has a valid session cookie issued by the frontend server. Note that + * this DOES NOT verify whether the token within the session cookie will be accepted by the + * downstream GMS service. + * + *

Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, + * as well as their agreement to determine authentication status. + */ + public static boolean hasValidSessionCookie(final Http.Request req) { + Map sessionCookie = req.session().data(); + return sessionCookie.containsKey(ACCESS_TOKEN) + && sessionCookie.containsKey(ACTOR) + && req.getCookie(ACTOR).isPresent() + && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); + } + + /** Returns true if a request includes the Authorization header, false otherwise */ + public static boolean hasAuthHeader(final Http.Request req) { + return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); + } + + /** + * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. + * + * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub" + * @param ttlInHours the number of hours until the actor cookie expires after being set + */ + public static Http.Cookie createActorCookie( + @Nonnull final String actorUrn, + @Nonnull final Integer ttlInHours, + @Nonnull final String sameSite, + final boolean isSecure) { + return Http.Cookie.builder(ACTOR, actorUrn) + .withHttpOnly(false) + .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) + .withSameSite(convertSameSiteValue(sameSite)) + .withSecure(isSecure) + .build(); + } + + public static Map createSessionMap( + final String userUrnStr, final String accessToken) { + final Map sessionAttributes = new HashMap<>(); + sessionAttributes.put(ACTOR, userUrnStr); + sessionAttributes.put(ACCESS_TOKEN, accessToken); + return sessionAttributes; + } + + private AuthUtils() {} + + private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { + try { + return Http.Cookie.SameSite.valueOf(sameSiteValue); + } catch (IllegalArgumentException e) { + log.warn( + String.format( + "Invalid AUTH_COOKIE_SAME_SITE value: %s. 
Using LAX instead.", sameSiteValue), + e); + return Http.Cookie.SameSite.LAX; } - - /** - * Returns true if a request has a valid session cookie issued by the frontend server. - * Note that this DOES NOT verify whether the token within the session cookie will be accepted - * by the downstream GMS service. - * - * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, - * as well as their agreement to determine authentication status. - */ - public static boolean hasValidSessionCookie(final Http.Request req) { - Map sessionCookie = req.session().data(); - return sessionCookie.containsKey(ACCESS_TOKEN) - && sessionCookie.containsKey(ACTOR) - && req.getCookie(ACTOR).isPresent() - && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); - } - - /** - * Returns true if a request includes the Authorization header, false otherwise - */ - public static boolean hasAuthHeader(final Http.Request req) { - return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); - } - - /** - * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. - * - * @param actorUrn the urn of the authenticated actor, e.g. 
"urn:li:corpuser:datahub" - * @param ttlInHours the number of hours until the actor cookie expires after being set - */ - public static Http.Cookie createActorCookie( - @Nonnull final String actorUrn, - @Nonnull final Integer ttlInHours, - @Nonnull final String sameSite, - final boolean isSecure - ) { - return Http.Cookie.builder(ACTOR, actorUrn) - .withHttpOnly(false) - .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) - .withSameSite(convertSameSiteValue(sameSite)) - .withSecure(isSecure) - .build(); - } - - public static Map createSessionMap(final String userUrnStr, final String accessToken) { - final Map sessionAttributes = new HashMap<>(); - sessionAttributes.put(ACTOR, userUrnStr); - sessionAttributes.put(ACCESS_TOKEN, accessToken); - return sessionAttributes; - } - - private AuthUtils() { } - - private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { - try { - return Http.Cookie.SameSite.valueOf(sameSiteValue); - } catch (IllegalArgumentException e) { - log.warn(String.format("Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), e); - return Http.Cookie.SameSite.LAX; - } - } - + } } diff --git a/datahub-frontend/app/auth/Authenticator.java b/datahub-frontend/app/auth/Authenticator.java index ae847b318dce28..8536fc7e016956 100644 --- a/datahub-frontend/app/auth/Authenticator.java +++ b/datahub-frontend/app/auth/Authenticator.java @@ -1,48 +1,49 @@ package auth; +import static auth.AuthUtils.*; + import com.typesafe.config.Config; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import javax.inject.Inject; import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -import static auth.AuthUtils.*; - - /** * Implementation of base Play Authentication used to determine if a request to a route should be * authenticated. 
*/ public class Authenticator extends Security.Authenticator { - private final boolean metadataServiceAuthEnabled; + private final boolean metadataServiceAuthEnabled; - @Inject - public Authenticator(@Nonnull Config config) { - this.metadataServiceAuthEnabled = config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + @Inject + public Authenticator(@Nonnull Config config) { + this.metadataServiceAuthEnabled = + config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) && config.getBoolean(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH); + } + + @Override + public Optional getUsername(@Nonnull Http.Request req) { + if (this.metadataServiceAuthEnabled) { + // If Metadata Service auth is enabled, we only want to verify presence of the + // "Authorization" header OR the presence of a frontend generated session cookie. + // At this time, the actor is still considered to be unauthenicated. + return Optional.ofNullable( + AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); + } else { + // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. + return Optional.ofNullable( + AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); } + } - @Override - public Optional getUsername(@Nonnull Http.Request req) { - if (this.metadataServiceAuthEnabled) { - // If Metadata Service auth is enabled, we only want to verify presence of the - // "Authorization" header OR the presence of a frontend generated session cookie. - // At this time, the actor is still considered to be unauthenicated. - return Optional.ofNullable(AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); - } else { - // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. - return Optional.ofNullable(AuthUtils.hasValidSessionCookie(req) ? 
req.session().data().get(ACTOR) : null); - } - } - - @Override - @Nonnull - public Result onUnauthorized(@Nullable Http.Request req) { - return unauthorized(); - } + @Override + @Nonnull + public Result onUnauthorized(@Nullable Http.Request req) { + return unauthorized(); + } } diff --git a/datahub-frontend/app/auth/ConfigUtil.java b/datahub-frontend/app/auth/ConfigUtil.java index e0999ee00be386..9fbed91ce6a10c 100644 --- a/datahub-frontend/app/auth/ConfigUtil.java +++ b/datahub-frontend/app/auth/ConfigUtil.java @@ -3,20 +3,20 @@ import com.typesafe.config.Config; import java.util.Optional; - public class ConfigUtil { - private ConfigUtil() { - } + private ConfigUtil() {} public static String getRequired(final Config configs, final String path) { if (!configs.hasPath(path)) { - throw new IllegalArgumentException(String.format("Missing required config with path %s", path)); + throw new IllegalArgumentException( + String.format("Missing required config with path %s", path)); } return configs.getString(path); } - public static String getOptional(final Config configs, final String path, final String defaultVal) { + public static String getOptional( + final Config configs, final String path, final String defaultVal) { if (!configs.hasPath(path)) { return defaultVal; } diff --git a/datahub-frontend/app/auth/CookieConfigs.java b/datahub-frontend/app/auth/CookieConfigs.java index b6da9b7a1833c4..63b2ce61aaf9bb 100644 --- a/datahub-frontend/app/auth/CookieConfigs.java +++ b/datahub-frontend/app/auth/CookieConfigs.java @@ -1,6 +1,5 @@ package auth; - import com.typesafe.config.Config; public class CookieConfigs { @@ -16,12 +15,18 @@ public class CookieConfigs { private final boolean _authCookieSecure; public CookieConfigs(final Config configs) { - _ttlInHours = configs.hasPath(SESSION_TTL_CONFIG_PATH) ? configs.getInt(SESSION_TTL_CONFIG_PATH) - : DEFAULT_SESSION_TTL_HOURS; - _authCookieSameSite = configs.hasPath(AUTH_COOKIE_SAME_SITE) ? 
configs.getString(AUTH_COOKIE_SAME_SITE) - : DEFAULT_AUTH_COOKIE_SAME_SITE; - _authCookieSecure = configs.hasPath(AUTH_COOKIE_SECURE) ? configs.getBoolean(AUTH_COOKIE_SECURE) - : DEFAULT_AUTH_COOKIE_SECURE; + _ttlInHours = + configs.hasPath(SESSION_TTL_CONFIG_PATH) + ? configs.getInt(SESSION_TTL_CONFIG_PATH) + : DEFAULT_SESSION_TTL_HOURS; + _authCookieSameSite = + configs.hasPath(AUTH_COOKIE_SAME_SITE) + ? configs.getString(AUTH_COOKIE_SAME_SITE) + : DEFAULT_AUTH_COOKIE_SAME_SITE; + _authCookieSecure = + configs.hasPath(AUTH_COOKIE_SECURE) + ? configs.getBoolean(AUTH_COOKIE_SECURE) + : DEFAULT_AUTH_COOKIE_SECURE; } public int getTtlInHours() { diff --git a/datahub-frontend/app/auth/JAASConfigs.java b/datahub-frontend/app/auth/JAASConfigs.java index f39c20aceb6f9b..529bf98e1fdcf2 100644 --- a/datahub-frontend/app/auth/JAASConfigs.java +++ b/datahub-frontend/app/auth/JAASConfigs.java @@ -6,17 +6,18 @@ */ public class JAASConfigs { - public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled"; + public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled"; - private Boolean _isEnabled = true; + private Boolean _isEnabled = true; - public JAASConfigs(final com.typesafe.config.Config configs) { - if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) { - _isEnabled = false; - } + public JAASConfigs(final com.typesafe.config.Config configs) { + if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) + && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) { + _isEnabled = false; } + } - public boolean isJAASEnabled() { - return _isEnabled; - } + public boolean isJAASEnabled() { + return _isEnabled; + } } diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java index db17313d67f9a4..772c2c8f92f28c 100644 --- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java +++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java @@ -1,23 
+1,35 @@ package auth; -/** - * Currently, this config enables or disable native user authentication. - */ +/** Currently, this config enables or disable native user authentication. */ public class NativeAuthenticationConfigs { public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled"; + public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = + "auth.native.signUp.enforceValidEmail"; private Boolean _isEnabled = true; + private Boolean _isEnforceValidEmailEnabled = true; public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) { - if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH) - && Boolean.FALSE.equals( - Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()))) { - _isEnabled = false; + if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) { + _isEnabled = + Boolean.parseBoolean( + configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()); + } + if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) { + _isEnforceValidEmailEnabled = + Boolean.parseBoolean( + configs + .getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH) + .toString()); } } public boolean isNativeAuthenticationEnabled() { return _isEnabled; } + + public boolean isEnforceValidEmailEnabled() { + return _isEnforceValidEmailEnabled; + } } diff --git a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java index a6dbd69a938893..223ac669bd6eae 100644 --- a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java +++ b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java @@ -7,16 +7,15 @@ import play.api.mvc.FlashCookieBaker; import play.api.mvc.SessionCookieBaker; - public class CustomCookiesModule extends AbstractModule { @Override public void configure() { bind(CookieSigner.class).toProvider(CookieSignerProvider.class); - 
// We override the session cookie baker to not use a fallback, this prevents using an old URL Encoded cookie + // We override the session cookie baker to not use a fallback, this prevents using an old URL + // Encoded cookie bind(SessionCookieBaker.class).to(CustomSessionCookieBaker.class); // We don't care about flash cookies, we don't use them bind(FlashCookieBaker.class).to(DefaultFlashCookieBaker.class); } - } diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java index 062054173bddb7..976d0826f22770 100644 --- a/datahub-frontend/app/auth/sso/SsoConfigs.java +++ b/datahub-frontend/app/auth/sso/SsoConfigs.java @@ -1,25 +1,28 @@ package auth.sso; -import static auth.ConfigUtil.*; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; + +import static auth.AuthUtils.*; /** - * Class responsible for extracting and validating top-level SSO related configurations. + * Class responsible for extracting and validating top-level SSO related configurations. TODO: + * Refactor SsoConfigs to have OidcConfigs and other identity provider specific configs as instance + * variables. SSoManager should ideally not know about identity provider specific configs. 
*/ public class SsoConfigs { - /** - * Required configs - */ + /** Required configs */ private static final String AUTH_BASE_URL_CONFIG_PATH = "auth.baseUrl"; + private static final String AUTH_BASE_CALLBACK_PATH_CONFIG_PATH = "auth.baseCallbackPath"; private static final String AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH = "auth.successRedirectPath"; public static final String OIDC_ENABLED_CONFIG_PATH = "auth.oidc.enabled"; - /** - * Default values - */ + /** Default values */ private static final String DEFAULT_BASE_CALLBACK_PATH = "/callback"; + private static final String DEFAULT_SUCCESS_REDIRECT_PATH = "/"; private final String _authBaseUrl; @@ -27,19 +30,11 @@ public class SsoConfigs { private final String _authSuccessRedirectPath; private final Boolean _oidcEnabled; - public SsoConfigs(final com.typesafe.config.Config configs) { - _authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH); - _authBaseCallbackPath = getOptional( - configs, - AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, - DEFAULT_BASE_CALLBACK_PATH); - _authSuccessRedirectPath = getOptional( - configs, - AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, - DEFAULT_SUCCESS_REDIRECT_PATH); - _oidcEnabled = configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + public SsoConfigs(Builder builder) { + _authBaseUrl = builder._authBaseUrl; + _authBaseCallbackPath = builder._authBaseCallbackPath; + _authSuccessRedirectPath = builder._authSuccessRedirectPath; + _oidcEnabled = builder._oidcEnabled; } public String getAuthBaseUrl() { @@ -57,4 +52,52 @@ public String getAuthSuccessRedirectPath() { public Boolean isOidcEnabled() { return _oidcEnabled; } + + public static class Builder> { + protected String _authBaseUrl = null; + private String _authBaseCallbackPath = DEFAULT_BASE_CALLBACK_PATH; + private String _authSuccessRedirectPath = DEFAULT_SUCCESS_REDIRECT_PATH; + protected Boolean _oidcEnabled = false; + private final ObjectMapper 
_objectMapper = new ObjectMapper(); + protected JsonNode jsonNode = null; + + // No need to check if changes are made since this method is only called at start-up. + public Builder from(final com.typesafe.config.Config configs) { + if (configs.hasPath(AUTH_BASE_URL_CONFIG_PATH)) { + _authBaseUrl = configs.getString(AUTH_BASE_URL_CONFIG_PATH); + } + if (configs.hasPath(AUTH_BASE_CALLBACK_PATH_CONFIG_PATH)) { + _authBaseCallbackPath = configs.getString(AUTH_BASE_CALLBACK_PATH_CONFIG_PATH); + } + if (configs.hasPath(OIDC_ENABLED_CONFIG_PATH)) { + _oidcEnabled = + Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + } + if (configs.hasPath(AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH)) { + _authSuccessRedirectPath = configs.getString(AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH); + } + return this; + } + + public Builder from(String ssoSettingsJsonStr) { + try { + jsonNode = _objectMapper.readTree(ssoSettingsJsonStr); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to parse ssoSettingsJsonStr %s into JSON", ssoSettingsJsonStr)); + } + if (jsonNode.has(BASE_URL)) { + _authBaseUrl = jsonNode.get(BASE_URL).asText(); + } + if (jsonNode.has(OIDC_ENABLED)) { + _oidcEnabled = jsonNode.get(OIDC_ENABLED).asBoolean(); + } + + return this; + } + + public SsoConfigs build() { + return new SsoConfigs(this); + } + } } diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java index 739ce3f1ba4508..8377eb40e237f7 100644 --- a/datahub-frontend/app/auth/sso/SsoManager.java +++ b/datahub-frontend/app/auth/sso/SsoManager.java @@ -1,24 +1,58 @@ package auth.sso; +import auth.sso.oidc.OidcConfigs; +import auth.sso.oidc.OidcProvider; +import com.datahub.authentication.Authentication; +import java.util.Objects; +import java.util.Optional; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; +import org.apache.http.HttpEntity; +import 
org.apache.http.HttpStatus; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.util.EntityUtils; +import play.mvc.Http; /** * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. + * TODO: Refactor SsoManager to only accept SsoConfigs when initialized. See SsoConfigs TODO as + * well. */ +@Slf4j public class SsoManager { private SsoProvider _provider; // Only one active provider at a time. + private final Authentication + _authentication; // Authentication used to fetch SSO settings from GMS + private final String _ssoSettingsRequestUrl; // SSO settings request URL. + private final CloseableHttpClient _httpClient; // HTTP client for making requests to GMS. + private com.typesafe.config.Config _configs; - public SsoManager() { } + public SsoManager( + com.typesafe.config.Config configs, + Authentication authentication, + String ssoSettingsRequestUrl, + CloseableHttpClient httpClient) { + _configs = configs; + _authentication = Objects.requireNonNull(authentication, "authentication cannot be null"); + _ssoSettingsRequestUrl = + Objects.requireNonNull(ssoSettingsRequestUrl, "ssoSettingsRequestUrl cannot be null"); + _httpClient = Objects.requireNonNull(httpClient, "httpClient cannot be null"); + _provider = null; + } /** - * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been - * provided to the manager. + * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the + * manager. * * @return true if SSO logic is enabled, false otherwise. */ public boolean isSsoEnabled() { + refreshSsoProvider(); return _provider != null; } @@ -27,17 +61,138 @@ public boolean isSsoEnabled() { * * @param provider the new {@link SsoProvider} to be used during authentication. 
*/ - public void setSsoProvider(@Nonnull final SsoProvider provider) { + public void setSsoProvider(final SsoProvider provider) { _provider = provider; } + public void setConfigs(final com.typesafe.config.Config configs) { + _configs = configs; + } + + public void clearSsoProvider() { + _provider = null; + } + /** * Gets the active {@link SsoProvider} instance. * - * @return the {@SsoProvider} that should be used during authentication and on - * IdP callback, or null if SSO is not enabled. + * @return the {@SsoProvider} that should be used during authentication and on IdP callback, or + * null if SSO is not enabled. */ + @Nullable public SsoProvider getSsoProvider() { return _provider; } + + public void initializeSsoProvider() { + SsoConfigs ssoConfigs = null; + try { + ssoConfigs = new SsoConfigs.Builder().from(_configs).build(); + } catch (Exception e) { + // Debug-level logging since this is expected to fail if SSO has not been configured. + log.debug(String.format("Missing SSO settings in static configs %s", _configs), e); + } + + if (ssoConfigs != null && ssoConfigs.isOidcEnabled()) { + try { + OidcConfigs oidcConfigs = new OidcConfigs.Builder().from(_configs).build(); + maybeUpdateOidcProvider(oidcConfigs); + } catch (Exception e) { + // Error-level logging since this is unexpected to fail if SSO has been configured. + log.error(String.format("Error building OidcConfigs from static configs %s", _configs), e); + } + } else { + // Clear the SSO Provider since no SSO is enabled. + clearSsoProvider(); + } + + refreshSsoProvider(); + } + + private void refreshSsoProvider() { + final Optional maybeSsoSettingsJsonStr = getDynamicSsoSettings(); + if (maybeSsoSettingsJsonStr.isEmpty()) { + return; + } + + // If we receive a non-empty response, try to update the SSO provider. 
+ final String ssoSettingsJsonStr = maybeSsoSettingsJsonStr.get(); + SsoConfigs ssoConfigs; + try { + ssoConfigs = new SsoConfigs.Builder().from(ssoSettingsJsonStr).build(); + } catch (Exception e) { + log.error( + String.format( + "Error building SsoConfigs from invalid json %s, reusing previous settings", + ssoSettingsJsonStr), + e); + return; + } + + if (ssoConfigs != null && ssoConfigs.isOidcEnabled()) { + try { + OidcConfigs oidcConfigs = + new OidcConfigs.Builder().from(_configs, ssoSettingsJsonStr).build(); + maybeUpdateOidcProvider(oidcConfigs); + } catch (Exception e) { + log.error( + String.format( + "Error building OidcConfigs from invalid json %s, reusing previous settings", + ssoSettingsJsonStr), + e); + } + } else { + // Clear the SSO Provider since no SSO is enabled. + clearSsoProvider(); + } + } + + private void maybeUpdateOidcProvider(OidcConfigs oidcConfigs) { + SsoProvider existingSsoProvider = getSsoProvider(); + if (existingSsoProvider instanceof OidcProvider) { + OidcProvider existingOidcProvider = (OidcProvider) existingSsoProvider; + // If the existing provider is an OIDC provider and the configs are the same, do nothing. + if (existingOidcProvider.configs().equals(oidcConfigs)) { + return; + } + } + + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + setSsoProvider(oidcProvider); + } + + /** Call the Auth Service to get SSO settings */ + @Nonnull + private Optional getDynamicSsoSettings() { + CloseableHttpResponse response = null; + try { + final HttpPost request = new HttpPost(_ssoSettingsRequestUrl); + + // Build JSON request to verify credentials for a native user. + request.setEntity(new StringEntity("")); + + // Add authorization header with DataHub frontend system id and secret. 
+ request.addHeader(Http.HeaderNames.AUTHORIZATION, _authentication.getCredentials()); + + response = _httpClient.execute(request); + final HttpEntity entity = response.getEntity(); + if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { + // Successfully received the SSO settings + return Optional.of(EntityUtils.toString(entity)); + } else { + log.debug("No SSO settings received from Auth Service, reusing previous settings"); + } + } catch (Exception e) { + log.warn("Failed to get SSO settings due to exception, reusing previous settings", e); + } finally { + try { + if (response != null) { + response.close(); + } + } catch (Exception e) { + log.warn("Failed to close http response", e); + } + } + return Optional.empty(); + } } diff --git a/datahub-frontend/app/auth/sso/SsoProvider.java b/datahub-frontend/app/auth/sso/SsoProvider.java index f7454d599ba995..a0947b52b92ae6 100644 --- a/datahub-frontend/app/auth/sso/SsoProvider.java +++ b/datahub-frontend/app/auth/sso/SsoProvider.java @@ -3,15 +3,10 @@ import org.pac4j.core.client.Client; import org.pac4j.core.credentials.Credentials; -/** - * A thin interface over a Pac4j {@link Client} object and its - * associated configurations. - */ +/** A thin interface over a Pac4j {@link Client} object and its associated configurations. */ public interface SsoProvider { - /** - * The protocol used for SSO. - */ + /** The protocol used for SSO. */ enum SsoProtocol { OIDC("oidc"); // SAML -- not yet supported. @@ -28,19 +23,12 @@ public String getCommonName() { } } - /** - * Returns the configs required by the provider. - */ + /** Returns the configs required by the provider. */ C configs(); - /** - * Returns the SSO protocol associated with the provider instance. - */ + /** Returns the SSO protocol associated with the provider instance. */ SsoProtocol protocol(); - /** - * Retrieves an initialized Pac4j {@link Client}. - */ + /** Retrieves an initialized Pac4j {@link Client}. 
*/ Client client(); - } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java index baca144610ec4c..fa676d2d16c904 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java @@ -1,9 +1,9 @@ package auth.sso.oidc; +import com.nimbusds.jwt.JWT; +import com.nimbusds.jwt.JWTParser; import java.util.Map.Entry; import java.util.Optional; - -import com.nimbusds.jwt.JWTParser; import org.pac4j.core.authorization.generator.AuthorizationGenerator; import org.pac4j.core.context.WebContext; import org.pac4j.core.profile.AttributeLocation; @@ -14,44 +14,43 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.nimbusds.jwt.JWT; - public class OidcAuthorizationGenerator implements AuthorizationGenerator { - private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class); - - private final ProfileDefinition profileDef; + private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class); - private final OidcConfigs oidcConfigs; + private final ProfileDefinition profileDef; - public OidcAuthorizationGenerator(final ProfileDefinition profileDef, final OidcConfigs oidcConfigs) { - this.profileDef = profileDef; - this.oidcConfigs = oidcConfigs; - } + private final OidcConfigs oidcConfigs; - @Override - public Optional generate(WebContext context, UserProfile profile) { - if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { - try { - final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); - - CommonProfile commonProfile = new CommonProfile(); - - for (final Entry entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { - final String claimName = entry.getKey(); - - if (profile.getAttribute(claimName) == null) { - profileDef.convertAndAdd(commonProfile, 
AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); - } - } - - return Optional.of(commonProfile); - } catch (Exception e) { - logger.warn("Cannot parse access token claims", e); - } + public OidcAuthorizationGenerator( + final ProfileDefinition profileDef, final OidcConfigs oidcConfigs) { + this.profileDef = profileDef; + this.oidcConfigs = oidcConfigs; + } + + @Override + public Optional generate(WebContext context, UserProfile profile) { + if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { + try { + final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); + + CommonProfile commonProfile = new CommonProfile(); + + for (final Entry entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { + final String claimName = entry.getKey(); + + if (profile.getAttribute(claimName) == null) { + profileDef.convertAndAdd( + commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); + } } - - return Optional.ofNullable(profile); + + return Optional.of(commonProfile); + } catch (Exception e) { + logger.warn("Cannot parse access token claims", e); + } } - + + return Optional.ofNullable(profile); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java index 7164710f4e0ded..510804ba17f1a8 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java @@ -1,8 +1,16 @@ package auth.sso.oidc; +import static auth.AuthUtils.*; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static org.pac4j.play.store.PlayCookieSessionStore.*; +import static play.mvc.Results.internalServerError; + import auth.CookieConfigs; +import auth.sso.SsoManager; import client.AuthServiceClient; -import com.datahub.authentication.Authentication; +import 
com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import com.linkedin.common.AuditStamp; import com.linkedin.common.CorpGroupUrnArray; import com.linkedin.common.CorpuserUrnArray; @@ -48,6 +56,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; + +import io.datahubproject.metadata.context.OperationContext; import lombok.extern.slf4j.Slf4j; import org.pac4j.core.config.Config; import org.pac4j.core.context.Cookie; @@ -59,52 +69,63 @@ import org.pac4j.core.util.Pac4jConstants; import org.pac4j.play.PlayWebContext; import play.mvc.Result; -import auth.sso.SsoManager; - -import static auth.AuthUtils.*; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; -import static org.pac4j.play.store.PlayCookieSessionStore.*; -import static play.mvc.Results.internalServerError; +import javax.annotation.Nonnull; /** - * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects back to D - * DataHub after an authentication attempt. + * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects + * back to D DataHub after an authentication attempt. * - * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract - * basic information about the user including their name, email, groups, & more. If just-in-time provisioning - * is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for the user, along with any Groups - * ({@link CorpGroupSnapshot}) that can be extracted, only doing so if the user does not already exist. + *

On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract + * basic information about the user including their name, email, groups, & more. If just-in-time + * provisioning is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for + * the user, along with any Groups ({@link CorpGroupSnapshot}) that can be extracted, only doing so + * if the user does not already exist. */ @Slf4j public class OidcCallbackLogic extends DefaultCallbackLogic { - private final SsoManager _ssoManager; - private final SystemEntityClient _entityClient; - private final Authentication _systemAuthentication; - private final AuthServiceClient _authClient; - private final CookieConfigs _cookieConfigs; - - public OidcCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, - final CookieConfigs cookieConfigs) { - _ssoManager = ssoManager; - _systemAuthentication = systemAuthentication; - _entityClient = entityClient; - _authClient = authClient; - _cookieConfigs = cookieConfigs; + private final SsoManager ssoManager; + private final SystemEntityClient systemEntityClient; + private final OperationContext systemOperationContext; + private final AuthServiceClient authClient; + private final CookieConfigs cookieConfigs; + + public OidcCallbackLogic( + final SsoManager ssoManager, + final OperationContext systemOperationContext, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { + this.ssoManager = ssoManager; + this.systemOperationContext = systemOperationContext; + systemEntityClient = entityClient; + this.authClient = authClient; + this.cookieConfigs = cookieConfigs; } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, 
String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { setContextRedirectUrl(context); final Result result = - super.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, + super.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, defaultClient); // Handle OIDC authentication errors. @@ -113,20 +134,31 @@ public Result perform(PlayWebContext context, Config config, } // By this point, we know that OIDC is the enabled provider. - final OidcConfigs oidcConfigs = (OidcConfigs) _ssoManager.getSsoProvider().configs(); - return handleOidcCallback(oidcConfigs, result, context, getProfileManager(context)); + final OidcConfigs oidcConfigs = (OidcConfigs) ssoManager.getSsoProvider().configs(); + return handleOidcCallback(systemOperationContext, oidcConfigs, result, getProfileManager(context)); } @SuppressWarnings("unchecked") private void setContextRedirectUrl(PlayWebContext context) { - Optional redirectUrl = context.getRequestCookies().stream() - .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())).findFirst(); + Optional redirectUrl = + context.getRequestCookies().stream() + .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())) + .findFirst(); redirectUrl.ifPresent( - cookie -> context.getSessionStore().set(context, Pac4jConstants.REQUESTED_URL, - JAVA_SER_HELPER.deserializeFromBytes(uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); + cookie -> + context + .getSessionStore() + .set( + context, + Pac4jConstants.REQUESTED_URL, + JAVA_SER_HELPER.deserializeFromBytes( + uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); } - private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result result, final 
PlayWebContext context, + private Result handleOidcCallback( + final OperationContext opContext, + final OidcConfigs oidcConfigs, + final Result result, final ProfileManager profileManager) { log.debug("Beginning OIDC Callback Handling..."); @@ -134,81 +166,101 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re if (profileManager.isAuthenticated()) { // If authenticated, the user should have a profile. final CommonProfile profile = (CommonProfile) profileManager.get(true).get(); - log.debug(String.format("Found authenticated user with profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Found authenticated user with profile %s", profile.getAttributes().toString())); // Extract the User name required to log into DataHub. final String userName = extractUserNameOrThrow(oidcConfigs, profile); final CorpuserUrn corpUserUrn = new CorpuserUrn(userName); try { - // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does not exist. + // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does + // not exist. if (oidcConfigs.isJitProvisioningEnabled()) { log.debug("Just-in-time provisioning is enabled. Beginning provisioning process..."); CorpUserSnapshot extractedUser = extractUser(corpUserUrn, profile); - tryProvisionUser(extractedUser); + tryProvisionUser(opContext, extractedUser); if (oidcConfigs.isExtractGroupsEnabled()) { // Extract groups & provision them. List extractedGroups = extractGroups(profile); - tryProvisionGroups(extractedGroups); - // Add users to groups on DataHub. Note that this clears existing group membership for a user if it already exists. - updateGroupMembership(corpUserUrn, createGroupMembership(extractedGroups)); + tryProvisionGroups(opContext, extractedGroups); + // Add users to groups on DataHub. Note that this clears existing group membership for a + // user if it already exists. 
+ updateGroupMembership(opContext, corpUserUrn, createGroupMembership(extractedGroups)); } } else if (oidcConfigs.isPreProvisioningRequired()) { // We should only allow logins for user accounts that have been pre-provisioned log.debug("Pre Provisioning is required. Beginning validation of extracted user..."); - verifyPreProvisionedUser(corpUserUrn); + verifyPreProvisionedUser(opContext, corpUserUrn); } // Update user status to active on login. // If we want to prevent certain users from logging in, here's where we'll want to do it. - setUserStatus(corpUserUrn, new CorpUserStatus().setStatus(Constants.CORP_USER_STATUS_ACTIVE) - .setLastModified(new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()))); + setUserStatus(opContext, + corpUserUrn, + new CorpUserStatus() + .setStatus(Constants.CORP_USER_STATUS_ACTIVE) + .setLastModified( + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()))); } catch (Exception e) { log.error("Failed to perform post authentication steps. Redirecting to error page.", e); return internalServerError( - String.format("Failed to perform post authentication steps. Error message: %s", e.getMessage())); + String.format( + "Failed to perform post authentication steps. 
Error message: %s", e.getMessage())); } + log.info("OIDC callback authentication successful for user: {}", userName); + // Successfully logged in - Generate GMS login token - final String accessToken = _authClient.generateSessionTokenForUser(corpUserUrn.getId()); + final String accessToken = authClient.generateSessionTokenForUser(corpUserUrn.getId()); return result - .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) - .withCookies( - createActorCookie( - corpUserUrn.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) + .withCookies( + createActorCookie( + corpUserUrn.toString(), + cookieConfigs.getTtlInHours(), + cookieConfigs.getAuthCookieSameSite(), + cookieConfigs.getAuthCookieSecure())); } return internalServerError( "Failed to authenticate current user. Cannot find valid identity provider profile in session."); } - private String extractUserNameOrThrow(final OidcConfigs oidcConfigs, final CommonProfile profile) { + private String extractUserNameOrThrow( + final OidcConfigs oidcConfigs, final CommonProfile profile) { // Ensure that the attribute exists (was returned by IdP) if (!profile.containsAttribute(oidcConfigs.getUserNameClaim())) { - throw new RuntimeException(String.format( - "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s", - oidcConfigs.getUserNameClaim(), oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString())); + throw new RuntimeException( + String.format( + "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. 
Attribute: '%s', Regex: '%s', Profile: %s", + oidcConfigs.getUserNameClaim(), + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString())); } final String userNameClaim = (String) profile.getAttribute(oidcConfigs.getUserNameClaim()); - final Optional mappedUserName = extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); - - return mappedUserName.orElseThrow(() -> new RuntimeException( - String.format("Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", - userNameClaim, oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString()))); + final Optional mappedUserName = + extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); + + return mappedUserName.orElseThrow( + () -> + new RuntimeException( + String.format( + "Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", + userNameClaim, + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString()))); } - /** - * Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. - */ + /** Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. 
*/ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { - log.debug(String.format("Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); // Extracts these based on the default set of OIDC claims, described here: // https://developer.okta.com/blog/2017/07/25/oidc-primer-part-1 @@ -217,7 +269,9 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { String email = profile.getEmail(); URI picture = profile.getPictureUrl(); String displayName = profile.getDisplayName(); - String fullName = (String) profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. + String fullName = + (String) + profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. if (fullName == null && firstName != null && lastName != null) { fullName = String.format("%s %s", firstName, lastName); } @@ -231,7 +285,8 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { userInfo.setFullName(fullName, SetMode.IGNORE_NULL); userInfo.setEmail(email, SetMode.IGNORE_NULL); // If there is a display name, use it. Otherwise fall back to full name. - userInfo.setDisplayName(displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); + userInfo.setDisplayName( + displayName == null ? 
userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); final CorpUserEditableInfo editableInfo = new CorpUserEditableInfo(); try { @@ -252,38 +307,50 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { return corpUserSnapshot; } + public static Collection getGroupNames(CommonProfile profile, Object groupAttribute, String groupsClaimName) { + Collection groupNames = Collections.emptyList(); + try { + if (groupAttribute instanceof Collection) { + // List of group names + groupNames = (Collection) profile.getAttribute(groupsClaimName, Collection.class); + } else if (groupAttribute instanceof String) { + String groupString = (String) groupAttribute; + ObjectMapper objectMapper = new ObjectMapper(); + try { + // Json list of group names + groupNames = objectMapper.readValue(groupString, new TypeReference>(){}); + } catch (Exception e) { + groupNames = Arrays.asList(groupString.split(",")); + } + } + } catch (Exception e) { + log.error(String.format( + "Failed to parse group names: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + } + return groupNames; + } private List extractGroups(CommonProfile profile) { - log.debug(String.format("Attempting to extract groups from OIDC profile %s", profile.getAttributes().toString())); - final OidcConfigs configs = (OidcConfigs) _ssoManager.getSsoProvider().configs(); + log.debug( + String.format( + "Attempting to extract groups from OIDC profile %s", + profile.getAttributes().toString())); + final OidcConfigs configs = (OidcConfigs) ssoManager.getSsoProvider().configs(); - // First, attempt to extract a list of groups from the profile, using the group name attribute config. + // First, attempt to extract a list of groups from the profile, using the group name attribute + // config. 
final List extractedGroups = new ArrayList<>(); final List groupsClaimNames = - new ArrayList(Arrays.asList(configs.getGroupsClaimName().split(","))).stream() - .map(String::trim) - .collect(Collectors.toList()); + new ArrayList(Arrays.asList(configs.getGroupsClaimName().split(","))) + .stream().map(String::trim).collect(Collectors.toList()); for (final String groupsClaimName : groupsClaimNames) { if (profile.containsAttribute(groupsClaimName)) { try { final List groupSnapshots = new ArrayList<>(); - final Collection groupNames; - final Object groupAttribute = profile.getAttribute(groupsClaimName); - if (groupAttribute instanceof Collection) { - // List of group names - groupNames = (Collection) profile.getAttribute(groupsClaimName, Collection.class); - } else if (groupAttribute instanceof String) { - // Single group name - groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class)); - } else { - log.error( - String.format("Fail to parse OIDC group claim with name %s. Unknown type %s provided.", groupsClaimName, - groupAttribute.getClass())); - // Skip over group attribute. Do not throw. - groupNames = Collections.emptyList(); - } + Collection groupNames = getGroupNames(profile, profile.getAttribute(groupsClaimName), groupsClaimName); for (String groupName : groupNames) { // Create a basic CorpGroupSnapshot from the information. @@ -297,7 +364,8 @@ private List extractGroups(CommonProfile profile) { corpGroupInfo.setDisplayName(groupName); // To deal with the possibility of spaces, we url encode the URN group name. 
- final String urlEncodedGroupName = URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); + final String urlEncodedGroupName = + URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); final CorpGroupUrn groupUrn = new CorpGroupUrn(urlEncodedGroupName); final CorpGroupSnapshot corpGroupSnapshot = new CorpGroupSnapshot(); corpGroupSnapshot.setUrn(groupUrn); @@ -306,18 +374,23 @@ private List extractGroups(CommonProfile profile) { corpGroupSnapshot.setAspects(aspects); groupSnapshots.add(corpGroupSnapshot); } catch (UnsupportedEncodingException ex) { - log.error(String.format("Failed to URL encoded extracted group name %s. Skipping", groupName)); + log.error( + String.format( + "Failed to URL encoded extracted group name %s. Skipping", groupName)); } } if (groupSnapshots.isEmpty()) { - log.warn(String.format("Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); + log.warn( + String.format( + "Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); } else { extractedGroups.addAll(groupSnapshots); } } catch (Exception e) { - log.error(String.format( - "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", - groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + log.error( + String.format( + "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); } } } @@ -327,17 +400,18 @@ private List extractGroups(CommonProfile profile) { private GroupMembership createGroupMembership(final List extractedGroups) { final GroupMembership groupMembershipAspect = new GroupMembership(); groupMembershipAspect.setGroups( - new UrnArray(extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + new UrnArray( + extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); return 
groupMembershipAspect; } - private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) { + private void tryProvisionUser(@Nonnull OperationContext opContext, CorpUserSnapshot corpUserSnapshot) { log.debug(String.format("Attempting to provision user with urn %s", corpUserSnapshot.getUrn())); // 1. Check if this user already exists. try { - final Entity corpUser = _entityClient.get(corpUserSnapshot.getUrn(), _systemAuthentication); + final Entity corpUser = systemEntityClient.get(opContext, corpUserSnapshot.getUrn()); final CorpUserSnapshot existingCorpUserSnapshot = corpUser.getValue().getCorpUserSnapshot(); log.debug(String.format("Fetched GMS user with urn %s", corpUserSnapshot.getUrn())); @@ -345,30 +419,39 @@ private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) { // If we find more than the key aspect, then the entity "exists". if (existingCorpUserSnapshot.getAspects().size() <= 1) { log.debug( - String.format("Extracted user that does not yet exist %s. Provisioning...", corpUserSnapshot.getUrn())); + String.format( + "Extracted user that does not yet exist %s. Provisioning...", + corpUserSnapshot.getUrn())); // 2. The user does not exist. Provision them. final Entity newEntity = new Entity(); newEntity.setValue(Snapshot.create(corpUserSnapshot)); - _entityClient.update(newEntity, _systemAuthentication); + systemEntityClient.update(opContext, newEntity); log.debug(String.format("Successfully provisioned user %s", corpUserSnapshot.getUrn())); } - log.debug(String.format("User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); + log.debug( + String.format( + "User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); // Otherwise, the user exists. Skip provisioning. } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. 
- throw new RuntimeException(String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); + throw new RuntimeException( + String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); } } - private void tryProvisionGroups(List corpGroups) { + private void tryProvisionGroups(@Nonnull OperationContext opContext, List corpGroups) { - log.debug(String.format("Attempting to provision groups with urns %s", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + log.debug( + String.format( + "Attempting to provision groups with urns %s", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); // 1. Check if this user already exists. try { - final Set urnsToFetch = corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); - final Map existingGroups = _entityClient.batchGet(urnsToFetch, _systemAuthentication); + final Set urnsToFetch = + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); + final Map existingGroups = + systemEntityClient.batchGet(opContext, urnsToFetch); log.debug(String.format("Fetched GMS groups with urns %s", existingGroups.keySet())); @@ -381,15 +464,21 @@ private void tryProvisionGroups(List corpGroups) { // If more than the key aspect exists, then the group already "exists". if (corpGroupSnapshot.getAspects().size() <= 1) { - log.debug(String.format("Extracted group that does not yet exist %s. Provisioning...", - corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + corpGroupSnapshot.getUrn())); groupsToCreate.add(extractedGroup); } - log.debug(String.format("Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Group %s already exists. 
Skipping provisioning", corpGroupSnapshot.getUrn())); } else { // Should not occur until we stop returning default Key aspects for unrecognized entities. log.debug( - String.format("Extracted group that does not yet exist %s. Provisioning...", extractedGroup.getUrn())); + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + extractedGroup.getUrn())); groupsToCreate.add(extractedGroup); } } @@ -400,19 +489,23 @@ private void tryProvisionGroups(List corpGroups) { log.debug(String.format("Provisioning groups with urns %s", groupsToCreateUrns)); // Now batch create all entities identified to create. - _entityClient.batchUpdate(groupsToCreate.stream() - .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) - .collect(Collectors.toSet()), _systemAuthentication); + systemEntityClient.batchUpdate(opContext, + groupsToCreate.stream() + .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) + .collect(Collectors.toSet())); log.debug(String.format("Successfully provisioned groups with urns %s", groupsToCreateUrns)); } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. 
- throw new RuntimeException(String.format("Failed to provision groups with urns %s.", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), e); + throw new RuntimeException( + String.format( + "Failed to provision groups with urns %s.", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), + e); } } - private void updateGroupMembership(Urn urn, GroupMembership groupMembership) { + private void updateGroupMembership(@Nonnull OperationContext opContext, Urn urn, GroupMembership groupMembership) { log.debug(String.format("Updating group membership for user %s", urn)); final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); @@ -421,24 +514,31 @@ private void updateGroupMembership(Urn urn, GroupMembership groupMembership) { proposal.setAspect(GenericRecordUtils.serializeAspect(groupMembership)); proposal.setChangeType(ChangeType.UPSERT); try { - _entityClient.ingestProposal(proposal, _systemAuthentication); + systemEntityClient.ingestProposal(opContext, proposal); } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to update group membership for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to update group membership for user with urn %s", urn), e); } } - private void verifyPreProvisionedUser(CorpuserUrn urn) { - // Validate that the user exists in the system (there is more than just a key aspect for them, as of today). + private void verifyPreProvisionedUser(@Nonnull OperationContext opContext, CorpuserUrn urn) { + // Validate that the user exists in the system (there is more than just a key aspect for them, + // as of today). 
try { - final Entity corpUser = _entityClient.get(urn, _systemAuthentication); + final Entity corpUser = systemEntityClient.get(opContext, urn); log.debug(String.format("Fetched GMS user with urn %s", urn)); // If we find more than the key aspect, then the entity "exists". if (corpUser.getValue().getCorpUserSnapshot().getAspects().size() <= 1) { - log.debug(String.format("Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); - throw new RuntimeException(String.format("User with urn %s has not yet been provisioned in DataHub. " - + "Please contact your DataHub admin to provision an account.", urn)); + log.debug( + String.format( + "Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); + throw new RuntimeException( + String.format( + "User with urn %s has not yet been provisioned in DataHub. " + + "Please contact your DataHub admin to provision an account.", + urn)); } // Otherwise, the user exists. } catch (RemoteInvocationException e) { @@ -447,7 +547,7 @@ private void verifyPreProvisionedUser(CorpuserUrn urn) { } } - private void setUserStatus(final Urn urn, final CorpUserStatus newStatus) throws Exception { + private void setUserStatus(@Nonnull OperationContext opContext, final Urn urn, final CorpUserStatus newStatus) throws Exception { // Update status aspect to be active. 
final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); @@ -455,7 +555,7 @@ private void setUserStatus(final Urn urn, final CorpUserStatus newStatus) throws proposal.setAspectName(Constants.CORP_USER_STATUS_ASPECT_NAME); proposal.setAspect(GenericRecordUtils.serializeAspect(newStatus)); proposal.setChangeType(ChangeType.UPSERT); - _entityClient.ingestProposal(proposal, _systemAuthentication); + systemEntityClient.ingestProposal(opContext, proposal); } private Optional extractRegexGroup(final String patternStr, final String target) { diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index eb037db2ef9c01..080ca236630bf3 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -1,104 +1,259 @@ package auth.sso.oidc; +import static auth.AuthUtils.*; +import static auth.ConfigUtil.*; + import auth.sso.SsoConfigs; +import java.util.Objects; import java.util.Optional; import lombok.Getter; -import static auth.ConfigUtil.*; - - -/** - * Class responsible for extracting and validating OIDC related configurations. - */ +/** Class responsible for extracting and validating OIDC related configurations. 
*/ @Getter public class OidcConfigs extends SsoConfigs { - /** - * Required configs - */ - public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; - public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; - public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; - - /** - * Optional configs - */ - public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; - public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; - public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; - public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; - public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = "auth.oidc.clientAuthenticationMethod"; - public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = "auth.oidc.jitProvisioningEnabled"; - public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = "auth.oidc.preProvisioningRequired"; - public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; - public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. 
- public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; - public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; - public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; - public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; - public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; - public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims"; - public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; - - /** - * Default values - */ - private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; - private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; // Often "group" must be included for groups. - private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; - private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; - private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; - private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; - private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; // False since extraction of groups can overwrite existing group membership. 
- private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; - private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; + /** Required configs */ + public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; + + public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; + public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + + /** Optional configs */ + public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; + + public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; + public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; + public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; + public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = + "auth.oidc.clientAuthenticationMethod"; + public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = + "auth.oidc.jitProvisioningEnabled"; + public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = + "auth.oidc.preProvisioningRequired"; + public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; + public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = + "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. 
+ public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; + public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; + public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; + public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; + public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; + public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = + "auth.oidc.extractJwtAccessTokenClaims"; + public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; + public static final String OIDC_GRANT_TYPE = "auth.oidc.grantType"; + public static final String OIDC_ACR_VALUES = "auth.oidc.acrValues"; + + /** Default values */ + private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; + + private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; + private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; + // Often "group" must be included for groups. + private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; + private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; + private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; + private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; + private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; + // False since extraction of groups can overwrite existing group membership. 
+ private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; + private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; + private final String clientId; + private final String clientSecret; + private final String discoveryUri; + private final String userNameClaim; + private final String userNameClaimRegex; + private final String scope; + private final String clientName; + private final String clientAuthenticationMethod; + private final boolean jitProvisioningEnabled; + private final boolean preProvisioningRequired; + private final boolean extractGroupsEnabled; + private final String groupsClaimName; + private final Optional responseType; + private final Optional responseMode; + private final Optional useNonce; + private final Optional customParamResource; + private final String readTimeout; + private final Optional extractJwtAccessTokenClaims; + private final Optional preferredJwsAlgorithm; + private final Optional grantType; + private final Optional acrValues; + + public OidcConfigs(Builder builder) { + super(builder); + this.clientId = builder.clientId; + this.clientSecret = builder.clientSecret; + this.discoveryUri = builder.discoveryUri; + this.userNameClaim = builder.userNameClaim; + this.userNameClaimRegex = builder.userNameClaimRegex; + this.scope = builder.scope; + this.clientName = builder.clientName; + this.clientAuthenticationMethod = builder.clientAuthenticationMethod; + this.jitProvisioningEnabled = builder.jitProvisioningEnabled; + this.preProvisioningRequired = builder.preProvisioningRequired; + this.extractGroupsEnabled = builder.extractGroupsEnabled; + this.groupsClaimName = builder.groupsClaimName; + this.responseType = builder.responseType; + this.responseMode = builder.responseMode; + this.useNonce = builder.useNonce; + this.customParamResource = builder.customParamResource; + this.readTimeout = builder.readTimeout; + this.extractJwtAccessTokenClaims = builder.extractJwtAccessTokenClaims; + this.preferredJwsAlgorithm = 
builder.preferredJwsAlgorithm; + this.acrValues = builder.acrValues; + this.grantType = builder.grantType; + } + + public static class Builder extends SsoConfigs.Builder { private String clientId; private String clientSecret; private String discoveryUri; - private String userNameClaim; - private String userNameClaimRegex; - private String scope; - private String clientName; - private String clientAuthenticationMethod; - private boolean jitProvisioningEnabled; - private boolean preProvisioningRequired; - private boolean extractGroupsEnabled; - private String groupsClaimName; - private Optional responseType; - private Optional responseMode; - private Optional useNonce; - private Optional customParamResource; - private String readTimeout; - private Optional extractJwtAccessTokenClaims; - private Optional preferredJwsAlgorithm; - - public OidcConfigs(final com.typesafe.config.Config configs) { - super(configs); - clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); - clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); - discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); - userNameClaim = getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); - userNameClaimRegex = - getOptional(configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); - scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); - clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); - clientAuthenticationMethod = getOptional(configs, OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, - DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); - jitProvisioningEnabled = Boolean.parseBoolean( - getOptional(configs, OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); - preProvisioningRequired = Boolean.parseBoolean( - getOptional(configs, OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); - 
extractGroupsEnabled = Boolean.parseBoolean( - getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); - groupsClaimName = getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); - responseType = getOptional(configs, OIDC_RESPONSE_TYPE); - responseMode = getOptional(configs, OIDC_RESPONSE_MODE); - useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); - customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); - readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); - extractJwtAccessTokenClaims = getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); - preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + private String userNameClaim = DEFAULT_OIDC_USERNAME_CLAIM; + private String userNameClaimRegex = DEFAULT_OIDC_USERNAME_CLAIM_REGEX; + private String scope = DEFAULT_OIDC_SCOPE; + private String clientName = DEFAULT_OIDC_CLIENT_NAME; + private String clientAuthenticationMethod = DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD; + private boolean jitProvisioningEnabled = + Boolean.parseBoolean(DEFAULT_OIDC_JIT_PROVISIONING_ENABLED); + private boolean preProvisioningRequired = + Boolean.parseBoolean(DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED); + private boolean extractGroupsEnabled = + Boolean.parseBoolean(DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED); + private String groupsClaimName = DEFAULT_OIDC_GROUPS_CLAIM; + private Optional responseType = Optional.empty(); + private Optional responseMode = Optional.empty(); + private Optional useNonce = Optional.empty(); + private Optional customParamResource = Optional.empty(); + private String readTimeout = DEFAULT_OIDC_READ_TIMEOUT; + private Optional extractJwtAccessTokenClaims = Optional.empty(); + private Optional preferredJwsAlgorithm = Optional.empty(); + private Optional grantType = Optional.empty(); + private Optional 
acrValues = Optional.empty(); + + public Builder from(final com.typesafe.config.Config configs) { + super.from(configs); + clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); + clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); + discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); + userNameClaim = + getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); + userNameClaimRegex = + getOptional( + configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); + scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); + clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); + clientAuthenticationMethod = + getOptional( + configs, + OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, + DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); + jitProvisioningEnabled = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, + DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); + preProvisioningRequired = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, + DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); + extractGroupsEnabled = + Boolean.parseBoolean( + getOptional( + configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); + groupsClaimName = + getOptional( + configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); + responseType = getOptional(configs, OIDC_RESPONSE_TYPE); + responseMode = getOptional(configs, OIDC_RESPONSE_MODE); + useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); + customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); + readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); + extractJwtAccessTokenClaims = + getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); + preferredJwsAlgorithm = + 
Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + grantType = Optional.ofNullable(getOptional(configs, OIDC_GRANT_TYPE, null)); + acrValues = Optional.ofNullable(getOptional(configs, OIDC_ACR_VALUES, null)); + return this; + } + + public Builder from(final com.typesafe.config.Config configs, final String ssoSettingsJsonStr) { + super.from(ssoSettingsJsonStr); + if (jsonNode.has(CLIENT_ID)) { + clientId = jsonNode.get(CLIENT_ID).asText(); + } + if (jsonNode.has(CLIENT_SECRET)) { + clientSecret = jsonNode.get(CLIENT_SECRET).asText(); + } + if (jsonNode.has(DISCOVERY_URI)) { + discoveryUri = jsonNode.get(DISCOVERY_URI).asText(); + } + if (jsonNode.has(USER_NAME_CLAIM)) { + userNameClaim = jsonNode.get(USER_NAME_CLAIM).asText(); + } + if (jsonNode.has(USER_NAME_CLAIM_REGEX)) { + userNameClaimRegex = jsonNode.get(USER_NAME_CLAIM_REGEX).asText(); + } + if (jsonNode.has(SCOPE)) { + scope = jsonNode.get(SCOPE).asText(); + } + if (jsonNode.has(CLIENT_NAME)) { + clientName = jsonNode.get(CLIENT_NAME).asText(); + } + if (jsonNode.has(CLIENT_AUTHENTICATION_METHOD)) { + clientAuthenticationMethod = jsonNode.get(CLIENT_AUTHENTICATION_METHOD).asText(); + } + if (jsonNode.has(JIT_PROVISIONING_ENABLED)) { + jitProvisioningEnabled = jsonNode.get(JIT_PROVISIONING_ENABLED).asBoolean(); + } + if (jsonNode.has(PRE_PROVISIONING_REQUIRED)) { + preProvisioningRequired = jsonNode.get(PRE_PROVISIONING_REQUIRED).asBoolean(); + } + if (jsonNode.has(EXTRACT_GROUPS_ENABLED)) { + extractGroupsEnabled = jsonNode.get(EXTRACT_GROUPS_ENABLED).asBoolean(); + } + if (jsonNode.has(GROUPS_CLAIM)) { + groupsClaimName = jsonNode.get(GROUPS_CLAIM).asText(); + } + if (jsonNode.has(RESPONSE_TYPE)) { + responseType = Optional.of(jsonNode.get(RESPONSE_TYPE).asText()); + } + if (jsonNode.has(RESPONSE_MODE)) { + responseMode = Optional.of(jsonNode.get(RESPONSE_MODE).asText()); + } + if (jsonNode.has(USE_NONCE)) { + useNonce = Optional.of(jsonNode.get(USE_NONCE).asBoolean()); + } + 
if (jsonNode.has(READ_TIMEOUT)) { + readTimeout = jsonNode.get(READ_TIMEOUT).asText(); + } + if (jsonNode.has(EXTRACT_JWT_ACCESS_TOKEN_CLAIMS)) { + extractJwtAccessTokenClaims = + Optional.of(jsonNode.get(EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).asBoolean()); + } + if (jsonNode.has(PREFERRED_JWS_ALGORITHM)) { + preferredJwsAlgorithm = Optional.of(jsonNode.get(PREFERRED_JWS_ALGORITHM).asText()); + } else { + preferredJwsAlgorithm = + Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + } + + return this; + } + + public OidcConfigs build() { + Objects.requireNonNull(_oidcEnabled, "oidcEnabled is required"); + Objects.requireNonNull(clientId, "clientId is required"); + Objects.requireNonNull(clientSecret, "clientSecret is required"); + Objects.requireNonNull(discoveryUri, "discoveryUri is required"); + Objects.requireNonNull(_authBaseUrl, "authBaseUrl is required"); + + return new OidcConfigs(this); } + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java index fd0a2e1877154e..a8a3205e8299c8 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java @@ -3,6 +3,8 @@ import auth.sso.SsoProvider; import auth.sso.oidc.custom.CustomOidcClient; import com.google.common.collect.ImmutableMap; +import java.util.HashMap; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.pac4j.core.client.Client; import org.pac4j.core.http.callback.PathParameterCallbackUrlResolver; @@ -10,15 +12,15 @@ import org.pac4j.oidc.credentials.OidcCredentials; import org.pac4j.oidc.profile.OidcProfileDefinition; - /** * Implementation of {@link SsoProvider} supporting the OIDC protocol. * - * This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC related - * configuration options, which reside in an instance of {@link OidcConfigs}. + *

This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC + * related configuration options, which reside in an instance of {@link OidcConfigs}. * - * It is responsible for initializing this client from a configuration object ({@link OidcConfigs}. Note that - * this class is not related to the logic performed when an IdP performs a callback to DataHub. + *

It is responsible for initializing this client from a configuration object ({@link + * OidcConfigs}. Note that this class is not related to the logic performed when an IdP performs a + * callback to DataHub. */ @Slf4j public class OidcProvider implements SsoProvider { @@ -53,7 +55,8 @@ private Client createPac4jClient() { oidcConfiguration.setClientId(_oidcConfigs.getClientId()); oidcConfiguration.setSecret(_oidcConfigs.getClientSecret()); oidcConfiguration.setDiscoveryURI(_oidcConfigs.getDiscoveryUri()); - oidcConfiguration.setClientAuthenticationMethodAsString(_oidcConfigs.getClientAuthenticationMethod()); + oidcConfiguration.setClientAuthenticationMethodAsString( + _oidcConfigs.getClientAuthenticationMethod()); oidcConfiguration.setScope(_oidcConfigs.getScope()); try { oidcConfiguration.setReadTimeout(Integer.parseInt(_oidcConfigs.getReadTimeout())); @@ -63,18 +66,34 @@ private Client createPac4jClient() { _oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType); _oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode); _oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce); - _oidcConfigs.getCustomParamResource() - .ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value))); - _oidcConfigs.getPreferredJwsAlgorithm().ifPresent(preferred -> { - log.info("Setting preferredJwsAlgorithm: " + preferred); - oidcConfiguration.setPreferredJwsAlgorithm(preferred); - }); + Map customParamsMap = new HashMap<>(); + _oidcConfigs + .getCustomParamResource() + .ifPresent(value -> customParamsMap.put("resource", value)); + _oidcConfigs + .getGrantType() + .ifPresent(value -> customParamsMap.put("grant_type", value)); + _oidcConfigs + .getAcrValues() + .ifPresent(value -> customParamsMap.put("acr_values", value)); + if (!customParamsMap.isEmpty()) { + oidcConfiguration.setCustomParams(customParamsMap); + } + _oidcConfigs + .getPreferredJwsAlgorithm() + .ifPresent( + preferred -> { + 
log.info("Setting preferredJwsAlgorithm: " + preferred); + oidcConfiguration.setPreferredJwsAlgorithm(preferred); + }); final CustomOidcClient oidcClient = new CustomOidcClient(oidcConfiguration); oidcClient.setName(OIDC_CLIENT_NAME); - oidcClient.setCallbackUrl(_oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); + oidcClient.setCallbackUrl( + _oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); oidcClient.setCallbackUrlResolver(new PathParameterCallbackUrlResolver()); - oidcClient.addAuthorizationGenerator(new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); + oidcClient.addAuthorizationGenerator( + new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); return oidcClient; } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java index 014632c17e690f..9881b5e095b781 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java @@ -1,57 +1,58 @@ package auth.sso.oidc; +import static play.mvc.Results.internalServerError; +import static play.mvc.Results.unauthorized; + +import java.util.Optional; import org.pac4j.play.PlayWebContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.mvc.Result; -import java.util.Optional; - -import static play.mvc.Results.internalServerError; -import static play.mvc.Results.unauthorized; - - public class OidcResponseErrorHandler { - private OidcResponseErrorHandler() { - - } - - private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); + private OidcResponseErrorHandler() {} - private static final String ERROR_FIELD_NAME = "error"; - private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; + private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); - public static Result 
handleError(final PlayWebContext context) { + private static final String ERROR_FIELD_NAME = "error"; + private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; - _logger.warn("OIDC responded with an error: '{}'. Error description: '{}'", - getError(context), - getErrorDescription(context)); + public static Result handleError(final PlayWebContext context) { - if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { - return unauthorized(String.format("Access denied. " - + "The OIDC service responded with 'Access denied'. " - + "It seems that you don't have access to this application yet. Please apply for access. \n\n" - + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " - + "Error details: '%s':'%s'", - context.getRequestParameter("error"), - context.getRequestParameter("error_description"))); - } + _logger.warn( + "OIDC responded with an error: '{}'. Error description: '{}'", + getError(context), + getErrorDescription(context)); - return internalServerError( - String.format("Internal server error. The OIDC service responded with an error: '%s'.\n" - + "Error description: '%s'", getError(context).orElse(""), getErrorDescription(context).orElse(""))); + if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { + return unauthorized( + String.format( + "Access denied. " + + "The OIDC service responded with 'Access denied'. " + + "It seems that you don't have access to this application yet. Please apply for access. \n\n" + + "If you already have been assigned this application, it may be so that your OIDC request is still in action. 
" + + "Error details: '%s':'%s'", + context.getRequestParameter("error"), + context.getRequestParameter("error_description"))); } - public static boolean isError(final PlayWebContext context) { - return getError(context).isPresent() && !getError(context).get().isEmpty(); - } + return internalServerError( + String.format( + "Internal server error. The OIDC service responded with an error: '%s'.\n" + + "Error description: '%s'", + getError(context).orElse(""), getErrorDescription(context).orElse(""))); + } - public static Optional getError(final PlayWebContext context) { - return context.getRequestParameter(ERROR_FIELD_NAME); - } + public static boolean isError(final PlayWebContext context) { + return getError(context).isPresent() && !getError(context).get().isEmpty(); + } - public static Optional getErrorDescription(final PlayWebContext context) { - return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); - } + public static Optional getError(final PlayWebContext context) { + return context.getRequestParameter(ERROR_FIELD_NAME); + } + + public static Optional getErrorDescription(final PlayWebContext context) { + return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java index 8c8c250fb7e639..01f8f16171d133 100644 --- a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java +++ b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java @@ -1,8 +1,8 @@ package auth.sso.oidc.custom; -import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; +import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.ParseException; import com.nimbusds.oauth2.sdk.TokenErrorResponse; import com.nimbusds.oauth2.sdk.TokenRequest; @@ -37,7 +37,6 @@ import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class CustomOidcAuthenticator implements Authenticator { private static final Logger logger = LoggerFactory.getLogger(OidcAuthenticator.class); @@ -61,14 +60,17 @@ public CustomOidcAuthenticator(final OidcClient client) { this.client = client; // check authentication methods - final List metadataMethods = configuration.findProviderMetadata().getTokenEndpointAuthMethods(); + final List metadataMethods = + configuration.findProviderMetadata().getTokenEndpointAuthMethods(); - final ClientAuthenticationMethod preferredMethod = getPreferredAuthenticationMethod(configuration); + final ClientAuthenticationMethod preferredMethod = + getPreferredAuthenticationMethod(configuration); final ClientAuthenticationMethod chosenMethod; if (CommonHelper.isNotEmpty(metadataMethods)) { if (preferredMethod != null) { - if (ClientAuthenticationMethod.NONE.equals(preferredMethod) || metadataMethods.contains(preferredMethod)) { + if (ClientAuthenticationMethod.NONE.equals(preferredMethod) + || metadataMethods.contains(preferredMethod)) { chosenMethod = preferredMethod; } else { throw new TechnicalException( @@ -83,8 +85,10 @@ public CustomOidcAuthenticator(final OidcClient client) { chosenMethod = firstSupportedMethod(metadataMethods); } } else { - chosenMethod = preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); - logger.info("Provider metadata does not provide Token endpoint authentication methods. Using: {}", + chosenMethod = + preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); + logger.info( + "Provider metadata does not provide Token endpoint authentication methods. Using: {}", chosenMethod); } @@ -103,38 +107,41 @@ public CustomOidcAuthenticator(final OidcClient client) { } /** - * The preferred {@link ClientAuthenticationMethod} specified in the given - * {@link OidcConfiguration}, or null meaning that the a - * provider-supported method should be chosen. 
+ * The preferred {@link ClientAuthenticationMethod} specified in the given {@link + * OidcConfiguration}, or null meaning that the a provider-supported method should be + * chosen. */ - private static ClientAuthenticationMethod getPreferredAuthenticationMethod(OidcConfiguration config) { + private static ClientAuthenticationMethod getPreferredAuthenticationMethod( + OidcConfiguration config) { final ClientAuthenticationMethod configurationMethod = config.getClientAuthenticationMethod(); if (configurationMethod == null) { return null; } if (!SUPPORTED_METHODS.contains(configurationMethod)) { - throw new TechnicalException("Configured authentication method (" + configurationMethod + ") is not supported."); + throw new TechnicalException( + "Configured authentication method (" + configurationMethod + ") is not supported."); } return configurationMethod; } /** - * The first {@link ClientAuthenticationMethod} from the given list of - * methods that is supported by this implementation. + * The first {@link ClientAuthenticationMethod} from the given list of methods that is supported + * by this implementation. * - * @throws TechnicalException - * if none of the provider-supported methods is supported. + * @throws TechnicalException if none of the provider-supported methods is supported. 
*/ - private static ClientAuthenticationMethod firstSupportedMethod(final List metadataMethods) { + private static ClientAuthenticationMethod firstSupportedMethod( + final List metadataMethods) { Optional firstSupported = metadataMethods.stream().filter((m) -> SUPPORTED_METHODS.contains(m)).findFirst(); if (firstSupported.isPresent()) { return firstSupported.get(); } else { - throw new TechnicalException("None of the Token endpoint provider metadata authentication methods are supported: " - + metadataMethods); + throw new TechnicalException( + "None of the Token endpoint provider metadata authentication methods are supported: " + + metadataMethods); } } @@ -145,21 +152,30 @@ public void validate(final OidcCredentials credentials, final WebContext context if (code != null) { try { final String computedCallbackUrl = client.computeFinalCallbackUrl(context); - CodeVerifier verifier = (CodeVerifier) configuration.getValueRetriever() - .retrieve(client.getCodeVerifierSessionAttributeName(), client, context).orElse(null); + CodeVerifier verifier = + (CodeVerifier) + configuration + .getValueRetriever() + .retrieve(client.getCodeVerifierSessionAttributeName(), client, context) + .orElse(null); // Token request - final TokenRequest request = createTokenRequest(new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier)); + final TokenRequest request = + createTokenRequest( + new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier)); HTTPRequest tokenHttpRequest = request.toHTTPRequest(); tokenHttpRequest.setConnectTimeout(configuration.getConnectTimeout()); tokenHttpRequest.setReadTimeout(configuration.getReadTimeout()); final HTTPResponse httpResponse = tokenHttpRequest.send(); - logger.debug("Token response: status={}, content={}", httpResponse.getStatusCode(), + logger.debug( + "Token response: status={}, content={}", + httpResponse.getStatusCode(), httpResponse.getContent()); final TokenResponse response = 
OIDCTokenResponseParser.parse(httpResponse); if (response instanceof TokenErrorResponse) { - throw new TechnicalException("Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); + throw new TechnicalException( + "Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); } logger.debug("Token response successful"); final OIDCTokenResponse tokenSuccessResponse = (OIDCTokenResponse) response; @@ -178,11 +194,15 @@ public void validate(final OidcCredentials credentials, final WebContext context private TokenRequest createTokenRequest(final AuthorizationGrant grant) { if (clientAuthentication != null) { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - this.clientAuthentication, grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + this.clientAuthentication, + grant); } else { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - new ClientID(configuration.getClientId()), grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + new ClientID(configuration.getClientId()), + grant); } } } diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 24183f5c625da9..30f841d10b4bfd 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -3,7 +3,7 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; - +import com.google.inject.Inject; import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; @@ -17,17 +17,16 @@ import org.apache.http.util.EntityUtils; import play.mvc.Http; - -/** - * This class is responsible for coordinating authentication with the backend Metadata Service. 
- */ +/** This class is responsible for coordinating authentication with the backend Metadata Service. */ @Slf4j public class AuthServiceClient { private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser"; private static final String SIGN_UP_ENDPOINT = "auth/signUp"; - private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; - private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; + private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/resetNativeUserCredentials"; + private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/verifyNativeUserCredentials"; private static final String TRACK_ENDPOINT = "auth/track"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; @@ -39,7 +38,8 @@ public class AuthServiceClient { private static final String INVITE_TOKEN_FIELD = "inviteToken"; private static final String RESET_TOKEN_FIELD = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch"; private final String metadataServiceHost; @@ -48,8 +48,12 @@ public class AuthServiceClient { private final Authentication systemAuthentication; private final CloseableHttpClient httpClient; - public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, - @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication, + @Inject + public AuthServiceClient( + @Nonnull final String metadataServiceHost, + @Nonnull final Integer metadataServicePort, + @Nonnull final 
Boolean useSsl, + @Nonnull final Authentication systemAuthentication, @Nonnull final CloseableHttpClient httpClient) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); @@ -59,10 +63,11 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin } /** - * Call the Auth Service to generate a session token for a particular user with a unique actor id, or throws an exception if generation fails. + * Call the Auth Service to generate a session token for a particular user with a unique actor id, + * or throws an exception if generation fails. * - * Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of an Actor of type - * USER. + *

Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of + * an Actor of type USER. */ @Nonnull public String generateSessionTokenForUser(@Nonnull final String userId) { @@ -70,17 +75,24 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { CloseableHttpResponse response = null; try { - final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - GENERATE_SESSION_TOKEN_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + GENERATE_SESSION_TOKEN_ENDPOINT)); + + log.info("Requesting session token for user: {}", userId); // Build JSON request to generate a token on behalf of a user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_ID_FIELD, userId); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. 
@@ -89,15 +101,17 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { response = httpClient.execute(request); final HttpEntity entity = response.getEntity(); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { - // Successfully generated a token for the User + log.info("Successfully received session token for user: {}", userId); final String jsonStr = EntityUtils.toString(entity); return getAccessTokenFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", + String.format( + "Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { + log.error("Failed to generate session token for user: {}", userId, e); throw new RuntimeException("Failed to generate session token for user", e); } finally { try { @@ -110,11 +124,14 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { } } - /** - * Call the Auth Service to create a native Datahub user. - */ - public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email, - @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) { + /** Call the Auth Service to create a native Datahub user. */ + public boolean signUp( + @Nonnull final String userUrn, + @Nonnull final String fullName, + @Nonnull final String email, + @Nonnull final String title, + @Nonnull final String password, + @Nonnull final String inviteToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(fullName, "fullName must not be null"); Objects.requireNonNull(email, "email must not be null"); @@ -126,9 +143,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN try { final String protocol = this.metadataServiceUseSsl ? 
"https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - SIGN_UP_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, SIGN_UP_ENDPOINT)); // Build JSON request to sign up a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -139,7 +158,8 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN objectNode.put(TITLE_FIELD, title); objectNode.put(PASSWORD_FIELD, password); objectNode.put(INVITE_TOKEN_FIELD, inviteToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -152,11 +172,15 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN final String jsonStr = EntityUtils.toString(entity); return getIsNativeUserCreatedFromJson(jsonStr); } else { - String content = response.getEntity().getContent() == null ? "" : new String( - response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); + String content = + response.getEntity().getContent() == null + ? 
"" + : new String( + response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s Body: %s", response.getStatusLine().toString(), - response.getEntity().toString(), content)); + String.format( + "Bad response from the Metadata Service: %s %s Body: %s", + response.getStatusLine().toString(), response.getEntity().toString(), content)); } } catch (Exception e) { throw new RuntimeException(String.format("Failed to create user %s", userUrn), e); @@ -171,10 +195,10 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN } } - /** - * Call the Auth Service to reset credentials for a native DataHub user. - */ - public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password, + /** Call the Auth Service to reset credentials for a native DataHub user. */ + public boolean resetNativeUserCredentials( + @Nonnull final String userUrn, + @Nonnull final String password, @Nonnull final String resetToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); @@ -184,9 +208,14 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. 
final ObjectMapper objectMapper = new ObjectMapper(); @@ -194,7 +223,8 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); objectNode.put(RESET_TOKEN_FIELD, resetToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -208,8 +238,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul return getAreNativeUserCredentialsResetFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to reset credentials for user", e); @@ -224,10 +255,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul } } - /** - * Call the Auth Service to verify the credentials for a native Datahub user. - */ - public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { + /** Call the Auth Service to verify the credentials for a native Datahub user. 
*/ + public boolean verifyNativeUserCredentials( + @Nonnull final String userUrn, @Nonnull final String password) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); CloseableHttpResponse response = null; @@ -235,16 +265,22 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. 
@@ -258,8 +294,9 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu return getDoesPasswordMatchFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to verify credentials for user", e); @@ -274,18 +311,18 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu } } - /** - * Call the Auth Service to track an analytics event - */ + /** Call the Auth Service to track an analytics event */ public void track(@Nonnull final String event) { Objects.requireNonNull(event, "event must not be null"); CloseableHttpResponse response = null; try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - TRACK_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, TRACK_ENDPOINT)); // Build JSON request to track event. 
request.setEntity(new StringEntity(event, StandardCharsets.UTF_8)); @@ -298,8 +335,9 @@ public void track(@Nonnull final String event) { if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to track event", e); diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java index fab17f9215d4a2..b7173684b63500 100644 --- a/datahub-frontend/app/client/KafkaTrackingProducer.java +++ b/datahub-frontend/app/client/KafkaTrackingProducer.java @@ -1,6 +1,17 @@ package client; +import com.linkedin.metadata.config.kafka.ProducerConfiguration; import com.typesafe.config.Config; +import config.ConfigurationProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.inject.Inject; +import javax.inject.Singleton; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -13,96 +24,141 @@ import play.api.inject.ApplicationLifecycle; import utils.ConfigUtil; -import javax.inject.Inject; - -import javax.annotation.Nonnull; -import javax.inject.Singleton; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; - @Singleton public class KafkaTrackingProducer { - private final Logger _logger = 
LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); - private static final List KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList( - Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), - SecurityProtocol.SASL_PLAINTEXT.name())); - - private final Boolean _isEnabled; - private final KafkaProducer _producer; - - @Inject - public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle) { - _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); - - if (_isEnabled) { - _logger.debug("Analytics tracking is enabled"); - _producer = createKafkaProducer(config); - - lifecycle.addStopHook( - () -> { - _producer.flush(); - _producer.close(); - return CompletableFuture.completedFuture(null); - }); - } else { - _logger.debug("Analytics tracking is disabled"); - _producer = null; - } - } - - public Boolean isEnabled() { - return _isEnabled; + private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); + private static final List KAFKA_SSL_PROTOCOLS = + Collections.unmodifiableList( + Arrays.asList( + SecurityProtocol.SSL.name(), + SecurityProtocol.SASL_SSL.name(), + SecurityProtocol.SASL_PLAINTEXT.name())); + + private final Boolean _isEnabled; + private final KafkaProducer _producer; + + @Inject + public KafkaTrackingProducer( + @Nonnull Config config, + ApplicationLifecycle lifecycle, + final ConfigurationProvider configurationProvider) { + _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); + + if (_isEnabled) { + _logger.debug("Analytics tracking is enabled"); + _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); + + lifecycle.addStopHook( + () -> { + _producer.flush(); + _producer.close(); + return CompletableFuture.completedFuture(null); + }); + } else { + _logger.debug("Analytics tracking is disabled"); + _producer = null; } - - public void send(ProducerRecord 
record) { - _producer.send(record); + } + + public Boolean isEnabled() { + return _isEnabled; + } + + public void send(ProducerRecord record) { + _producer.send(record); + } + + private static KafkaProducer createKafkaProducer( + Config config, ProducerConfiguration producerConfiguration) { + final Properties props = new Properties(); + props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); + props.put( + ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, + config.getString("analytics.kafka.delivery.timeout.ms")); + props.put( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + config.getString("analytics.kafka.bootstrap.server")); + props.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. + props.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. + props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); + props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); + + final String securityProtocolConfig = "analytics.kafka.security.protocol"; + if (config.hasPath(securityProtocolConfig) + && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { + props.put( + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); + setConfig( + config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); + + setConfig( + config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.keystore.location"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.keystore.password"); + + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + "analytics.kafka.ssl.truststore.type"); + setConfig( + config, + 
props, + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.truststore.location"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.truststore.password"); + + setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); + setConfig( + config, + props, + SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, + "analytics.kafka.ssl.endpoint.identification.algorithm"); + + final String securityProtocol = config.getString(securityProtocolConfig); + if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) + || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { + setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); + setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); + setConfig( + config, + props, + SaslConfigs.SASL_KERBEROS_SERVICE_NAME, + "analytics.kafka.sasl.kerberos.service.name"); + setConfig( + config, + props, + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.login.callback.handler.class"); + setConfig( + config, + props, + SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.client.callback.handler.class"); + } } - private static KafkaProducer createKafkaProducer(Config config) { - final Properties props = new Properties(); - props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); - props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms")); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server")); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. 
- - final String securityProtocolConfig = "analytics.kafka.security.protocol"; - if (config.hasPath(securityProtocolConfig) - && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { - props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); - setConfig(config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); - - setConfig(config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.keystore.location"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.keystore.password"); - - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "analytics.kafka.ssl.truststore.type"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.truststore.location"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.truststore.password"); - - setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); - setConfig(config, props, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "analytics.kafka.ssl.endpoint.identification.algorithm"); - - final String securityProtocol = config.getString(securityProtocolConfig); - if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) - || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { - setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); - setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); - setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "analytics.kafka.sasl.kerberos.service.name"); - setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class"); - setConfig(config, props, 
SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class"); - } - } - - return new org.apache.kafka.clients.producer.KafkaProducer(props); - } + return new org.apache.kafka.clients.producer.KafkaProducer(props); + } - private static void setConfig(Config config, Properties props, String key, String configKey) { - Optional.ofNullable(ConfigUtil.getString(config, configKey, null)) - .ifPresent(v -> props.put(key, v)); - } + private static void setConfig(Config config, Properties props, String key, String configKey) { + Optional.ofNullable(ConfigUtil.getString(config, configKey, null)) + .ifPresent(v -> props.put(key, v)); + } } diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 00a5472ec34763..d447b28cdcc465 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -1,27 +1,36 @@ package config; +import com.datahub.authorization.AuthorizationConfiguration; +import com.linkedin.metadata.config.VisualConfiguration; import com.linkedin.metadata.config.cache.CacheConfiguration; +import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import lombok.Data; - +import org.springframework.boot.autoconfigure.kafka.KafkaProperties; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.PropertySource; - +import org.springframework.stereotype.Component; /** - * Minimal sharing between metadata-service and frontend - * Initially for use of client caching configuration. - * Does not use the factories module to avoid transitive dependencies. 
+ * Minimal sharing between metadata-service and frontend Does not use the factories module to avoid + * transitive dependencies. */ @EnableConfigurationProperties -@PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class) +@PropertySource(value = "classpath:/application.yaml", factory = YamlPropertySourceFactory.class) @ConfigurationProperties @Data public class ConfigurationProvider { + /** Kafka related configs. */ + private KafkaConfiguration kafka; + + /** Configuration for caching */ + private CacheConfiguration cache; + + /** Configuration for the view layer */ + private VisualConfiguration visualConfig; - /** - * Configuration for caching - */ - private CacheConfiguration cache; + /** Configuration for authorization */ + private AuthorizationConfiguration authorization; } diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java index 5c76f2572a9360..d17e600aadc072 100644 --- a/datahub-frontend/app/controllers/Application.java +++ b/datahub-frontend/app/controllers/Application.java @@ -1,5 +1,8 @@ package controllers; +import static auth.AuthUtils.ACTOR; +import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME; + import akka.actor.ActorSystem; import akka.stream.ActorMaterializer; import akka.stream.Materializer; @@ -9,41 +12,36 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.util.Pair; import com.typesafe.config.Config; - +import java.io.InputStream; +import java.net.URI; +import java.time.Duration; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.inject.Inject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.Environment; import play.http.HttpEntity; +import play.libs.Json; import 
play.libs.ws.InMemoryBodyWritable; import play.libs.ws.StandaloneWSClient; -import play.libs.Json; import play.libs.ws.ahc.StandaloneAhcWSClient; import play.mvc.Controller; import play.mvc.Http; import play.mvc.ResponseHeader; import play.mvc.Result; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.inject.Inject; -import java.io.InputStream; import play.mvc.Security; import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient; import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig; import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient; import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig; import utils.ConfigUtil; -import java.time.Duration; - -import static auth.AuthUtils.ACTOR; -import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME; - public class Application extends Controller { private final Logger _logger = LoggerFactory.getLogger(Application.class.getName()); @@ -61,22 +59,17 @@ public Application(Environment environment, @Nonnull Config config) { /** * Serves the build output index.html for any given path * - * @param path takes a path string, which essentially is ignored - * routing is managed client side + * @param path takes a path string, which essentially is ignored routing is managed client side * @return {Result} build output index.html resource */ @Nonnull private Result serveAsset(@Nullable String path) { try { InputStream indexHtml = _environment.resourceAsStream("public/index.html"); - return ok(indexHtml) - .withHeader("Cache-Control", "no-cache") - .as("text/html"); + return ok(indexHtml).withHeader("Cache-Control", "no-cache").as("text/html"); } catch (Exception e) { _logger.warn("Cannot load public/index.html resource. 
Static assets or assets jar missing?"); - return notFound() - .withHeader("Cache-Control", "no-cache") - .as("text/html"); + return notFound().withHeader("Cache-Control", "no-cache").as("text/html"); } } @@ -99,66 +92,93 @@ public Result index(@Nullable String path) { /** * Proxies requests to the Metadata Service * - * TODO: Investigate using mutual SSL authentication to call Metadata Service. + *

TODO: Investigate using mutual SSL authentication to call Metadata Service. */ @Security.Authenticated(Authenticator.class) - public CompletableFuture proxy(String path, Http.Request request) throws ExecutionException, InterruptedException { + public CompletableFuture proxy(String path, Http.Request request) + throws ExecutionException, InterruptedException { final String authorizationHeaderValue = getAuthorizationHeaderValueToProxy(request); final String resolvedUri = mapPath(request.uri()); - final String metadataServiceHost = ConfigUtil.getString( - _config, - ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = ConfigUtil.getInt( - _config, - ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = ConfigUtil.getBoolean( - _config, - ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); + final String metadataServiceHost = + ConfigUtil.getString( + _config, + ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); + final int metadataServicePort = + ConfigUtil.getInt( + _config, + ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); + final boolean metadataServiceUseSsl = + ConfigUtil.getBoolean( + _config, + ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); // TODO: Fully support custom internal SSL. final String protocol = metadataServiceUseSsl ? 
"https" : "http"; final Map> headers = request.getHeaders().toMap(); - if (headers.containsKey(Http.HeaderNames.HOST) && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { - headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); + if (headers.containsKey(Http.HeaderNames.HOST) + && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { + headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); } - return _ws.url(String.format("%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) + if (!headers.containsKey(Http.HeaderNames.X_FORWARDED_PROTO)) { + final String schema = + Optional.ofNullable(URI.create(request.uri()).getScheme()).orElse("http"); + headers.put(Http.HeaderNames.X_FORWARDED_PROTO, List.of(schema)); + } + + return _ws.url( + String.format( + "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) .setMethod(request.method()) - .setHeaders(headers - .entrySet() - .stream() - // Remove X-DataHub-Actor to prevent malicious delegation. - .filter(entry -> !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) - // Remove Host s.th. service meshes do not route to wrong host - .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ) + .setHeaders( + headers.entrySet().stream() + // Remove X-DataHub-Actor to prevent malicious delegation. 
+ .filter( + entry -> + !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase( + entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) + // Remove Host s.th. service meshes do not route to wrong host + .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) .addHeader(Http.HeaderNames.AUTHORIZATION, authorizationHeaderValue) - .addHeader(AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) - .setBody(new InMemoryBodyWritable(ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), "application/json")) + .addHeader( + AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) + .setBody( + new InMemoryBodyWritable( + ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), + request.contentType().orElse("application/json"))) .setRequestTimeout(Duration.ofSeconds(120)) .execute() - .thenApply(apiResponse -> { - final ResponseHeader header = new ResponseHeader(apiResponse.getStatus(), apiResponse.getHeaders() - .entrySet() - .stream() - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); - final HttpEntity body = new HttpEntity.Strict(apiResponse.getBodyAsBytes(), Optional.ofNullable(apiResponse.getContentType())); - return new Result(header, body); - }).toCompletableFuture(); + .thenApply( + apiResponse -> { + final ResponseHeader header = + new ResponseHeader( + apiResponse.getStatus(), + 
apiResponse.getHeaders().entrySet().stream() + .filter( + entry -> + !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter( + entry -> + !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); + final HttpEntity body = + new HttpEntity.Strict( + apiResponse.getBodyAsBytes(), + Optional.ofNullable(apiResponse.getContentType())); + return new Result(header, body); + }) + .toCompletableFuture(); } /** @@ -173,11 +193,13 @@ public Result appConfig() { config.put("appVersion", _config.getString("app.version")); config.put("isInternal", _config.getBoolean("linkedin.internal")); config.put("shouldShowDatasetLineage", _config.getBoolean("linkedin.show.dataset.lineage")); - config.put("suggestionConfidenceThreshold", + config.put( + "suggestionConfidenceThreshold", Integer.valueOf(_config.getString("linkedin.suggestion.confidence.threshold"))); config.set("wikiLinks", wikiLinks()); config.set("tracking", trackingInfo()); - // In a staging environment, we can trigger this flag to be true so that the UI can handle based on + // In a staging environment, we can trigger this flag to be true so that the UI can handle based + // on // such config and alert users that their changes will not affect production data config.put("isStagingBanner", _config.getBoolean("ui.show.staging.banner")); config.put("isLiveDataWarning", _config.getBoolean("ui.show.live.data.banner")); @@ -206,6 +228,7 @@ public Result appConfig() { /** * Creates a JSON object of profile / avatar properties + * * @return Json avatar / profile image properties */ @Nonnull @@ -273,23 +296,26 @@ private StandaloneWSClient createWsClient() { } /** - * Returns the value of the Authorization Header to be provided when proxying requests to the downstream Metadata Service. 
+ * Returns the value of the Authorization Header to be provided when proxying requests to the + * downstream Metadata Service. * - * Currently, the Authorization header value may be derived from + *

Currently, the Authorization header value may be derived from * - * a) The value of the "token" attribute of the Session Cookie provided by the client. This value is set - * when creating the session token initially from a token granted by the Metadata Service. + *

a) The value of the "token" attribute of the Session Cookie provided by the client. This + * value is set when creating the session token initially from a token granted by the Metadata + * Service. * - * Or if the "token" attribute cannot be found in a session cookie, then we fallback to + *

Or if the "token" attribute cannot be found in a session cookie, then we fallback to * - * b) The value of the Authorization - * header provided in the original request. This will be used in cases where clients are making programmatic requests - * to Metadata Service APIs directly, without providing a session cookie (ui only). + *

b) The value of the Authorization header provided in the original request. This will be used + * in cases where clients are making programmatic requests to Metadata Service APIs directly, + * without providing a session cookie (ui only). * - * If neither are found, an empty string is returned. + *

If neither are found, an empty string is returned. */ private String getAuthorizationHeaderValueToProxy(Http.Request request) { - // If the session cookie has an authorization token, use that. If there's an authorization header provided, simply + // If the session cookie has an authorization token, use that. If there's an authorization + // header provided, simply // use that. String value = ""; if (request.session().data().containsKey(SESSION_COOKIE_GMS_TOKEN_NAME)) { @@ -301,11 +327,13 @@ private String getAuthorizationHeaderValueToProxy(Http.Request request) { } /** - * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This is sent along - * with any requests that have a valid frontend session cookie to identify the calling actor, for backwards compatibility. + * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This + * is sent along with any requests that have a valid frontend session cookie to identify the + * calling actor, for backwards compatibility. * - * If Metadata Service authentication is enabled, this value is not required because Actor context will most often come - * from the authentication credentials provided in the Authorization header. + *

If Metadata Service authentication is enabled, this value is not required because Actor + * context will most often come from the authentication credentials provided in the Authorization + * header. */ private String getDataHubActorHeader(Http.Request request) { String actor = request.session().data().get(ACTOR); diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index 4f89f4f67e1499..87c4b5ba06793b 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -1,5 +1,9 @@ package controllers; +import static auth.AuthUtils.*; +import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; +import static org.pac4j.play.store.PlayCookieSessionStore.*; + import auth.AuthUtils; import auth.CookieConfigs; import auth.JAASConfigs; @@ -11,12 +15,15 @@ import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.typesafe.config.Config; +import java.net.URI; +import java.net.URISyntaxException; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Optional; import javax.annotation.Nonnull; import javax.inject.Inject; +import org.apache.commons.httpclient.InvalidRedirectLocationException; import org.apache.commons.lang3.StringUtils; import org.pac4j.core.client.Client; import org.pac4j.core.context.Cookie; @@ -27,6 +34,7 @@ import org.pac4j.play.store.PlaySessionStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import play.data.validation.Constraints; import play.libs.Json; import play.mvc.Controller; import play.mvc.Http; @@ -34,318 +42,349 @@ import play.mvc.Results; import security.AuthenticationManager; -import static auth.AuthUtils.*; -import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; -import static 
org.pac4j.play.store.PlayCookieSessionStore.*; - - -// TODO add logging. public class AuthenticationController extends Controller { - public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; - private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; - private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; - private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; - - private static final String SSO_NO_REDIRECT_MESSAGE = "SSO is configured, however missing redirect from idp"; - - private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); - private final CookieConfigs _cookieConfigs; - private final JAASConfigs _jaasConfigs; - private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; - private final boolean _verbose; - - @Inject - private org.pac4j.core.config.Config _ssoConfig; - - @Inject - private PlaySessionStore _playSessionStore; - - @Inject - private SsoManager _ssoManager; - - @Inject - AuthServiceClient _authClient; - - @Inject - public AuthenticationController(@Nonnull Config configs) { - _cookieConfigs = new CookieConfigs(configs); - _jaasConfigs = new JAASConfigs(configs); - _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); - _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; + private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; + private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; + private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; + + private static final String SSO_NO_REDIRECT_MESSAGE = + "SSO is configured, however missing redirect from idp"; + + private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); + private final CookieConfigs _cookieConfigs; + private final JAASConfigs _jaasConfigs; + 
private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; + private final boolean _verbose; + + @Inject private org.pac4j.core.config.Config _ssoConfig; + + @Inject private PlaySessionStore _playSessionStore; + + @Inject private SsoManager _ssoManager; + + @Inject AuthServiceClient _authClient; + + @Inject + public AuthenticationController(@Nonnull Config configs) { + _cookieConfigs = new CookieConfigs(configs); + _jaasConfigs = new JAASConfigs(configs); + _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); + _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + } + + /** + * Route used to perform authentication, or redirect to log in if authentication fails. + * + *

If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider + * (Indirect auth). If not, we will fall back to the default username / password login experience + * (Direct auth). + */ + @Nonnull + public Result authenticate(Http.Request request) { + + // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is + // authenticated. + + final Optional maybeRedirectPath = + Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); + String redirectPath = maybeRedirectPath.orElse("/"); + try { + URI redirectUri = new URI(redirectPath); + if (redirectUri.getScheme() != null || redirectUri.getAuthority() != null) { + throw new InvalidRedirectLocationException("Redirect location must be relative to the base url, cannot " + + "redirect to other domains: " + redirectPath, redirectPath); + } + } catch (URISyntaxException | InvalidRedirectLocationException e) { + _logger.warn(e.getMessage()); + redirectPath = "/"; } - /** - * Route used to perform authentication, or redirect to log in if authentication fails. - * - * If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider (Indirect auth). - * If not, we will fall back to the default username / password login experience (Direct auth). - */ - @Nonnull - public Result authenticate(Http.Request request) { - - // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is authenticated. - - final Optional maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); - final String redirectPath = maybeRedirectPath.orElse("/"); - - if (AuthUtils.hasValidSessionCookie(request)) { - return Results.redirect(redirectPath); - } - - // 1. If SSO is enabled, redirect to IdP if not authenticated. 
- if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, redirectPath).orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - - // 2. If either JAAS auth or Native auth is enabled, fallback to it - if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { - return Results.redirect( - LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); - } - - // 3. If no auth enabled, fallback to using default user account & redirect. - // Generate GMS session token, TODO: - final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); - return Results.redirect(redirectPath).withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) - .withCookies( - createActorCookie( - DEFAULT_ACTOR_URN.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + if (AuthUtils.hasValidSessionCookie(request)) { + return Results.redirect(redirectPath); } - /** - * Redirect to the identity provider for authentication. - */ - @Nonnull - public Result sso(Http.Request request) { - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, "/").orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + // 1. If SSO is enabled, redirect to IdP if not authenticated. + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, redirectPath) + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); } - /** - * Log in a user based on a username + password. - * - * TODO: Implement built-in support for LDAP auth. 
Currently dummy jaas authentication is the default. - */ - @Nonnull - public Result logIn(Http.Request request) { - boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); - _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; - if (noAuthEnabled) { - String message = "Neither JAAS nor native authentication is enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } - - final JsonNode json = request.body().asJson(); - final String username = json.findPath(USER_NAME).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - - if (StringUtils.isBlank(username)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); - return Results.badRequest(invalidCredsJson); - } - - JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); - boolean loginSucceeded = tryLogin(username, password); - - if (!loginSucceeded) { - return Results.badRequest(invalidCredsJson); - } + // 2. If either JAAS auth or Native auth is enabled, fallback to it + if (_jaasConfigs.isJAASEnabled() + || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { + return Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); + } - final Urn actorUrn = new CorpuserUrn(username); - final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); - return createSession(actorUrn.toString(), accessToken); + // 3. If no auth enabled, fallback to using default user account & redirect. 
+ // Generate GMS session token, TODO: + final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); + return Results.redirect(redirectPath) + .withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) + .withCookies( + createActorCookie( + DEFAULT_ACTOR_URN.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } + + /** Redirect to the identity provider for authentication. */ + @Nonnull + public Result sso(Http.Request request) { + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, "/") + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); + } + return Results.redirect( + LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + } + + /** + * Log in a user based on a username + password. + * + *

TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the + * default. + */ + @Nonnull + public Result logIn(Http.Request request) { + boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); + _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; + if (noAuthEnabled) { + String message = "Neither JAAS nor native authentication is enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); } - /** - * Sign up a native user based on a name, email, title, and password. The invite token must match an existing invite token. - * - */ - @Nonnull - public Result signUp(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } + final JsonNode json = request.body().asJson(); + final String username = json.findPath(USER_NAME).textValue(); + final String password = json.findPath(PASSWORD).textValue(); - final JsonNode json = request.body().asJson(); - final String fullName = json.findPath(FULL_NAME).textValue(); - final String email = json.findPath(EMAIL).textValue(); - final String title = json.findPath(TITLE).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); + if 
(StringUtils.isBlank(username)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(fullName)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); - return Results.badRequest(invalidCredsJson); - } + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = tryLogin(username, password); - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (!loginSucceeded) { + _logger.info("Login failed for user: {}", username); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn actorUrn = new CorpuserUrn(username); + _logger.info("Login successful for user: {}, urn: {}", username, actorUrn); + final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); + return createSession(actorUrn.toString(), accessToken); + } + + /** + * Sign up a native user based on a name, email, title, and password. The invite token must match + * an existing invite token. 
+ */ + @Nonnull + public Result signUp(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); + } - if (StringUtils.isBlank(title)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String fullName = json.findPath(FULL_NAME).textValue(); + final String email = json.findPath(EMAIL).textValue(); + final String title = json.findPath(TITLE).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); - if (StringUtils.isBlank(inviteToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(fullName)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } + if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { + 
Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); + if (!emailValidator.isValid(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } } - /** - * Reset a native user's credentials based on a username, old password, and new password. - * - */ - @Nonnull - public Result resetNativeUserCredentials(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return badRequest(error); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final JsonNode json = request.body().asJson(); - final String email = json.findPath(EMAIL).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String resetToken = json.findPath(RESET_TOKEN).textValue(); + if (StringUtils.isBlank(title)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(inviteToken)) { + JsonNode invalidCredsJson = + Json.newObject().put("message", "Invite token must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return 
Results.badRequest(invalidCredsJson); - } + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); + _logger.info("Signed up user {} using invite tokens", userUrnString); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + /** Reset a native user's credentials based on a username, old password, and new password. */ + @Nonnull + public Result resetNativeUserCredentials(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } - if (StringUtils.isBlank(resetToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String email = json.findPath(EMAIL).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String resetToken = json.findPath(RESET_TOKEN).textValue(); - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); } - private Optional 
redirectToIdentityProvider(Http.RequestHeader request, String redirectPath) { - final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); - final Client client = _ssoManager.getSsoProvider().client(); - configurePac4jSessionStore(playWebContext, client, redirectPath); - try { - final Optional action = client.getRedirectionAction(playWebContext); - return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); - } catch (Exception e) { - if (_verbose) { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", e); - } else { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); - } - return Optional.of(Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8)))); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); } - private void configurePac4jSessionStore(PlayWebContext context, Client client, String redirectPath) { - // Set the originally requested path for post-auth redirection. We split off into a separate cookie from the session - // to reduce size of the session cookie - FoundAction foundAction = new FoundAction(redirectPath); - byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); - String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); - context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); - // This is to prevent previous login attempts from being cached. - // We replicate the logic here, which is buried in the Pac4j client. 
- if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) != null) { - _logger.debug("Found previous login attempt. Removing it manually to prevent unexpected errors."); - _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); - } + if (StringUtils.isBlank(resetToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); + return Results.badRequest(invalidCredsJson); } - private String encodeRedirectUri(final String redirectUri) { - return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + private Optional redirectToIdentityProvider( + Http.RequestHeader request, String redirectPath) { + final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); + final Client client = _ssoManager.getSsoProvider().client(); + configurePac4jSessionStore(playWebContext, client, redirectPath); + try { + final Optional action = client.getRedirectionAction(playWebContext); + return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); + } catch (Exception e) { + if (_verbose) { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", + e); + } else { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); + } + return Optional.of( + Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to redirect to Single Sign-On provider. 
Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8)))); } - - private boolean tryLogin(String username, String password) { - boolean loginSucceeded = false; - - // First try jaas login, if enabled - if (_jaasConfigs.isJAASEnabled()) { - try { - _logger.debug("Attempting jaas authentication"); - AuthenticationManager.authenticateJaasUser(username, password); - _logger.debug("Jaas authentication successful. Login succeeded"); - loginSucceeded = true; - } catch (Exception e) { - if (_verbose) { - _logger.debug("Jaas authentication error. Login failed", e); - } else { - _logger.debug("Jaas authentication error. Login failed"); - } - } - } - - // If jaas login fails or is disabled, try native auth login - if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { - final Urn userUrn = new CorpuserUrn(username); - final String userUrnString = userUrn.toString(); - loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); + } + + private void configurePac4jSessionStore( + PlayWebContext context, Client client, String redirectPath) { + // Set the originally requested path for post-auth redirection. We split off into a separate + // cookie from the session + // to reduce size of the session cookie + FoundAction foundAction = new FoundAction(redirectPath); + byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); + String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); + context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); + // This is to prevent previous login attempts from being cached. + // We replicate the logic here, which is buried in the Pac4j client. + if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) + != null) { + _logger.debug( + "Found previous login attempt. 
Removing it manually to prevent unexpected errors."); + _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); + } + } + + private String encodeRedirectUri(final String redirectUri) { + return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + } + + private boolean tryLogin(String username, String password) { + boolean loginSucceeded = false; + + // First try jaas login, if enabled + if (_jaasConfigs.isJAASEnabled()) { + try { + _logger.debug("Attempting JAAS authentication for user: {}", username); + AuthenticationManager.authenticateJaasUser(username, password); + _logger.debug("JAAS authentication successful. Login succeeded"); + loginSucceeded = true; + } catch (Exception e) { + if (_verbose) { + _logger.debug("JAAS authentication error. Login failed", e); + } else { + _logger.debug("JAAS authentication error. Login failed"); } - - return loginSucceeded; + } } - private Result createSession(String userUrnString, String accessToken) { - return Results.ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies( - createActorCookie( - userUrnString, - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); - + // If jaas login fails or is disabled, try native auth login + if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { + final Urn userUrn = new CorpuserUrn(username); + final String userUrnString = userUrn.toString(); + loginSucceeded = + loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); } -} \ No newline at end of file + + return loginSucceeded; + } + + private Result createSession(String userUrnString, String accessToken) { + return Results.ok() + .withSession(createSessionMap(userUrnString, accessToken)) + .withCookies( + createActorCookie( + userUrnString, + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + 
_cookieConfigs.getAuthCookieSecure())); + } +} diff --git a/datahub-frontend/app/controllers/CentralLogoutController.java b/datahub-frontend/app/controllers/CentralLogoutController.java index 5e24fe9f8220cf..eea1c662ebf894 100644 --- a/datahub-frontend/app/controllers/CentralLogoutController.java +++ b/datahub-frontend/app/controllers/CentralLogoutController.java @@ -2,18 +2,15 @@ import com.typesafe.config.Config; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import javax.inject.Inject; import lombok.extern.slf4j.Slf4j; import org.pac4j.play.LogoutController; import play.mvc.Http; import play.mvc.Result; import play.mvc.Results; -import javax.inject.Inject; -import java.nio.charset.StandardCharsets; - -/** - * Responsible for handling logout logic with oidc providers - */ +/** Responsible for handling logout logic with oidc providers */ @Slf4j public class CentralLogoutController extends LogoutController { private static final String AUTH_URL_CONFIG_PATH = "/login"; @@ -28,26 +25,27 @@ public CentralLogoutController(Config config) { setLogoutUrlPattern(DEFAULT_BASE_URL_PATH + ".*"); setLocalLogout(true); setCentralLogout(true); - } - /** - * logout() method should not be called if oidc is not enabled - */ + /** logout() method should not be called if oidc is not enabled */ public Result executeLogout(Http.Request request) { if (_isOidcEnabled) { try { return logout(request).toCompletableFuture().get().withNewSession(); } catch (Exception e) { - log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e); + log.error( + "Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", + e); return redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to sign out using Single Sign-On provider. 
Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8))) - .withNewSession(); + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8))) + .withNewSession(); } } - return Results.redirect(AUTH_URL_CONFIG_PATH) - .withNewSession(); + return Results.redirect(AUTH_URL_CONFIG_PATH).withNewSession(); } } diff --git a/datahub-frontend/app/controllers/RedirectController.java b/datahub-frontend/app/controllers/RedirectController.java new file mode 100644 index 00000000000000..17f86b7fbffae3 --- /dev/null +++ b/datahub-frontend/app/controllers/RedirectController.java @@ -0,0 +1,25 @@ +package controllers; + +import config.ConfigurationProvider; +import javax.inject.Inject; +import javax.inject.Singleton; +import play.mvc.Controller; +import play.mvc.Http; +import play.mvc.Result; + +@Singleton +public class RedirectController extends Controller { + + @Inject ConfigurationProvider config; + + public Result favicon(Http.Request request) { + if (config.getVisualConfig().getAssets().getFaviconUrl().startsWith("http")) { + return permanentRedirect(config.getVisualConfig().getAssets().getFaviconUrl()); + } else { + final String prefix = config.getVisualConfig().getAssets().getFaviconUrl().startsWith("/") ? 
"/public" : "/public/"; + return ok(Application.class.getResourceAsStream( + prefix + config.getVisualConfig().getAssets().getFaviconUrl())) + .as("image/x-icon"); + } + } +} diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java index 7a4b5585cc21ab..750886570bf406 100644 --- a/datahub-frontend/app/controllers/SsoCallbackController.java +++ b/datahub-frontend/app/controllers/SsoCallbackController.java @@ -1,16 +1,26 @@ package controllers; import auth.CookieConfigs; +import auth.sso.SsoManager; +import auth.sso.SsoProvider; +import auth.sso.oidc.OidcCallbackLogic; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemEntityClient; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletionStage; import javax.annotation.Nonnull; import javax.inject.Inject; +import javax.inject.Named; + +import io.datahubproject.metadata.context.OperationContext; import lombok.extern.slf4j.Slf4j; +import org.pac4j.core.client.Client; +import org.pac4j.core.client.Clients; import org.pac4j.core.config.Config; import org.pac4j.core.engine.CallbackLogic; import org.pac4j.core.http.adapter.HttpActionAdapter; @@ -18,84 +28,127 @@ import org.pac4j.play.PlayWebContext; import play.mvc.Http; import play.mvc.Result; -import auth.sso.oidc.OidcCallbackLogic; -import auth.sso.SsoManager; -import auth.sso.SsoProvider; import play.mvc.Results; - /** * A dedicated Controller for handling redirects to DataHub by 3rd-party Identity Providers after * off-platform authentication. * - * Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines + *

Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines * the handling logic to invoke. */ @Slf4j public class SsoCallbackController extends CallbackController { private final SsoManager _ssoManager; + private final Config _config; @Inject public SsoCallbackController( @Nonnull SsoManager ssoManager, - @Nonnull Authentication systemAuthentication, + @Named("systemOperationContext") @Nonnull OperationContext systemOperationContext, @Nonnull SystemEntityClient entityClient, @Nonnull AuthServiceClient authClient, + @Nonnull Config config, @Nonnull com.typesafe.config.Config configs) { _ssoManager = ssoManager; + _config = config; setDefaultUrl("/"); // By default, redirects to Home Page on log in. setSaveInSession(false); - setCallbackLogic(new SsoCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, new CookieConfigs(configs))); + setCallbackLogic( + new SsoCallbackLogic( + ssoManager, + systemOperationContext, + entityClient, + authClient, + new CookieConfigs(configs))); } public CompletionStage handleCallback(String protocol, Http.Request request) { if (shouldHandleCallback(protocol)) { - log.debug(String.format("Handling SSO callback. Protocol: %s", protocol)); - return callback(request).handle((res, e) -> { - if (e != null) { - log.error("Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", e); - return Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode( - "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", - StandardCharsets.UTF_8))) - .discardingCookie("actor") - .withNewSession(); - } - return res; - }); + log.debug("Handling SSO callback. Protocol: {}", + _ssoManager.getSsoProvider().protocol().getCommonName()); + return callback(request) + .handle( + (res, e) -> { + if (e != null) { + log.error( + "Caught exception while attempting to handle SSO callback! 
It's likely that SSO integration is mis-configured.", + e); + return Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", + StandardCharsets.UTF_8))) + .discardingCookie("actor") + .withNewSession(); + } + return res; + }); } - return CompletableFuture.completedFuture(Results.internalServerError( - String.format("Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); + return CompletableFuture.completedFuture( + Results.internalServerError( + String.format( + "Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); } - - /** - * Logic responsible for delegating to protocol-specific callback logic. - */ + /** Logic responsible for delegating to protocol-specific callback logic. */ public class SsoCallbackLogic implements CallbackLogic { private final OidcCallbackLogic _oidcCallbackLogic; - SsoCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, final CookieConfigs cookieConfigs) { - _oidcCallbackLogic = new OidcCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); + SsoCallbackLogic( + final SsoManager ssoManager, + final OperationContext systemOperationContext, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { + _oidcCallbackLogic = + new OidcCallbackLogic( + ssoManager, systemOperationContext, entityClient, authClient, cookieConfigs); } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter httpActionAdapter, + 
String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { if (SsoProvider.SsoProtocol.OIDC.equals(_ssoManager.getSsoProvider().protocol())) { - return _oidcCallbackLogic.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, defaultClient); + return _oidcCallbackLogic.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, + defaultClient); } // Should never occur. - throw new UnsupportedOperationException("Failed to find matching SSO Provider. Only one supported is OIDC."); + throw new UnsupportedOperationException( + "Failed to find matching SSO Provider. Only one supported is OIDC."); } } private boolean shouldHandleCallback(final String protocol) { - return _ssoManager.isSsoEnabled() && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + if (!_ssoManager.isSsoEnabled()) { + return false; + } + updateConfig(); + return _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + } + + private void updateConfig() { + final Clients clients = new Clients(); + final List clientList = new ArrayList<>(); + clientList.add(_ssoManager.getSsoProvider().client()); + clients.setClients(clientList); + _config.setClients(clients); } } diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java index 776ab5cad58ff0..254a8cc640d0c5 100644 --- a/datahub-frontend/app/controllers/TrackingController.java +++ b/datahub-frontend/app/controllers/TrackingController.java @@ -1,14 +1,15 @@ package controllers; +import static auth.AuthUtils.ACTOR; + import auth.Authenticator; import client.AuthServiceClient; +import client.KafkaTrackingProducer; import com.fasterxml.jackson.databind.JsonNode; import com.typesafe.config.Config; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Singleton; - - 
import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,57 +17,52 @@ import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import client.KafkaTrackingProducer; - -import static auth.AuthUtils.ACTOR; - // TODO: Migrate this to metadata-service. @Singleton public class TrackingController extends Controller { - private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); + private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); - private final String _topic; + private final String _topic; - @Inject - KafkaTrackingProducer _producer; + @Inject KafkaTrackingProducer _producer; - @Inject - AuthServiceClient _authClient; + @Inject AuthServiceClient _authClient; - @Inject - public TrackingController(@Nonnull Config config) { - _topic = config.getString("analytics.tracking.topic"); - } + @Inject + public TrackingController(@Nonnull Config config) { + _topic = config.getString("analytics.tracking.topic"); + } - @Security.Authenticated(Authenticator.class) - @Nonnull - public Result track(Http.Request request) throws Exception { - if (!_producer.isEnabled()) { - // If tracking is disabled, simply return a 200. - return status(200); - } + @Security.Authenticated(Authenticator.class) + @Nonnull + public Result track(Http.Request request) throws Exception { + if (!_producer.isEnabled()) { + // If tracking is disabled, simply return a 200. + return status(200); + } - JsonNode event; - try { - event = request.body().asJson(); - } catch (Exception e) { - return badRequest(); - } - final String actor = request.session().data().get(ACTOR); - try { - _logger.debug(String.format("Emitting product analytics event. 
actor: %s, event: %s", actor, event)); - final ProducerRecord record = new ProducerRecord<>( - _topic, - actor, - event.toString()); - _producer.send(record); - _authClient.track(event.toString()); - return ok(); - } catch (Exception e) { - _logger.error(String.format("Failed to emit product analytics event. actor: %s, event: %s", actor, event)); - return internalServerError(e.getMessage()); - } + JsonNode event; + try { + event = request.body().asJson(); + } catch (Exception e) { + return badRequest(); + } + final String actor = request.session().data().get(ACTOR); + try { + _logger.debug( + String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); + final ProducerRecord record = + new ProducerRecord<>(_topic, actor, event.toString()); + _producer.send(record); + _authClient.track(event.toString()); + return ok(); + } catch (Exception e) { + _logger.error( + String.format( + "Failed to emit product analytics event. actor: %s, event: %s", actor, event)); + return internalServerError(e.getMessage()); } + } } diff --git a/datahub-frontend/app/security/AuthUtil.java b/datahub-frontend/app/security/AuthUtil.java index 8af90b37a6f31b..55752644ada706 100644 --- a/datahub-frontend/app/security/AuthUtil.java +++ b/datahub-frontend/app/security/AuthUtil.java @@ -8,52 +8,53 @@ import javax.crypto.spec.SecretKeySpec; import org.apache.commons.codec.digest.HmacAlgorithms; - -/** - * Auth Utils - * Adheres to HSEC requirement for creating application tokens - */ +/** Auth Utils Adheres to HSEC requirement for creating application tokens */ public final class AuthUtil { private static final String HMAC_SHA256_ALGORITHM = HmacAlgorithms.HMAC_SHA_256.toString(); private static final String DELIIMITER = ":"; private static final String HEX_CHARS = "0123456789ABCDEF"; - private AuthUtil() { } + private AuthUtil() {} /** * Generate hash string using the secret HMAC Key + * * @param value value to be hashed * @param hmacKey secret HMAC key * @return 
Hashed string using the secret key * @throws NoSuchAlgorithmException * @throws InvalidKeyException */ - public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlgorithmException, InvalidKeyException { - //Time-stamp at Encryption time + public static String generateHash(String value, byte[] hmacKey) + throws NoSuchAlgorithmException, InvalidKeyException { + // Time-stamp at Encryption time long tStamp = System.currentTimeMillis(); String uTValue = new String(); String cValue; String finalEncValue; - //Concatenated Values + // Concatenated Values uTValue = uTValue.concat(value).concat(":").concat(Long.toString(tStamp)); cValue = uTValue; - //Digest - HMAC-SHA256 + // Digest - HMAC-SHA256 SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); byte[] rawHmac = mac.doFinal(uTValue.getBytes()); String hmacString = getHex(rawHmac); - finalEncValue = Base64.getEncoder().encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); + finalEncValue = + Base64.getEncoder() + .encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); return finalEncValue; } /** * Validate the one-way hash string + * * @param hashedValue Hashed value to be validated * @param hmacKey HMAC Key used to create the hash * @param sessionWindow previously defined session window to validate if the hash is expired @@ -62,7 +63,7 @@ public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlg */ public static String verifyHash(String hashedValue, byte[] hmacKey, long sessionWindow) throws GeneralSecurityException { - //Username:Timestamp:SignedHMAC(Username:Timestamp) + // Username:Timestamp:SignedHMAC(Username:Timestamp) String[] decryptedHash = decryptBase64Hash(hashedValue); String username = decryptedHash[0]; String timestamp = decryptedHash[1]; @@ -70,7 +71,7 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long 
session long newTStamp = System.currentTimeMillis(); String newUTValue = username.concat(DELIIMITER).concat(timestamp); - //Digest - HMAC-SHA1 Verify + // Digest - HMAC-SHA1 Verify SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); @@ -87,8 +88,10 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session return decryptedHash[0]; } + /** * Decrypt base64 hash + * * @param value base 64 hash string * @return Decrypted base 64 string */ @@ -96,8 +99,10 @@ private static String[] decryptBase64Hash(String value) { String decodedBase64 = new String(Base64.getDecoder().decode(value)); return decodedBase64.split(DELIIMITER); } + /** * Get Hex string from byte array + * * @param raw byte array * @return Hex representation of the byte array */ @@ -114,14 +119,16 @@ private static String getHex(byte[] raw) { return hex.toString(); } + /** * Compares two HMAC byte arrays + * * @param a HMAC byte array 1 * @param b HMAC byte array 2 * @return true if the two HMAC are identical */ private static boolean isEqual(byte[] a, byte[] b) { - if (a == null || b == null || a.length != b.length) { + if (a == null || b == null || a.length != b.length) { return false; } @@ -133,4 +140,4 @@ private static boolean isEqual(byte[] a, byte[] b) { return result == 0; } -} \ No newline at end of file +} diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index 67bcf7e404335f..f46dc57c232bd2 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ b/datahub-frontend/app/security/AuthenticationManager.java @@ -15,13 +15,12 @@ import org.eclipse.jetty.jaas.PropertyUserStoreManager; import play.Logger; - public class AuthenticationManager { - private AuthenticationManager(boolean verbose) { - } + private AuthenticationManager(boolean verbose) {} - public static void 
authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception { + public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) + throws Exception { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication"); PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager(); @@ -29,10 +28,12 @@ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull Strin jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager)); JAASLoginService.INSTANCE.set(jaasLoginService); try { - LoginContext lc = new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); + LoginContext lc = + new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); lc.login(); } catch (LoginException le) { - AuthenticationException authenticationException = new AuthenticationException(le.getMessage()); + AuthenticationException authenticationException = + new AuthenticationException(le.getMessage()); authenticationException.setRootCause(le); throw authenticationException; } @@ -52,7 +53,8 @@ public void handle(@Nonnull Callback[] callbacks) { NameCallback nc = null; PasswordCallback pc = null; for (Callback callback : callbacks) { - Logger.debug("The submitted callback is of type: " + callback.getClass() + " : " + callback); + Logger.debug( + "The submitted callback is of type: " + callback.getClass() + " : " + callback); if (callback instanceof NameCallback) { nc = (NameCallback) callback; nc.setName(this.username); diff --git a/datahub-frontend/app/security/DummyLoginModule.java b/datahub-frontend/app/security/DummyLoginModule.java index 56822f0805be41..c46fa29e1599ad 100644 --- a/datahub-frontend/app/security/DummyLoginModule.java +++ b/datahub-frontend/app/security/DummyLoginModule.java @@ -1,21 +1,22 @@ package security; +import 
java.util.Map; import javax.security.auth.Subject; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.LoginException; import javax.security.auth.spi.LoginModule; -import java.util.Map; - /** - * This LoginModule performs dummy authentication. - * Any username and password can work for authentication + * This LoginModule performs dummy authentication. Any username and password can work for + * authentication */ public class DummyLoginModule implements LoginModule { - public void initialize(final Subject subject, final CallbackHandler callbackHandler, - final Map sharedState, final Map options) { - } + public void initialize( + final Subject subject, + final CallbackHandler callbackHandler, + final Map sharedState, + final Map options) {} public boolean login() throws LoginException { return true; @@ -32,5 +33,4 @@ public boolean abort() throws LoginException { public boolean logout() throws LoginException { return true; } - -} \ No newline at end of file +} diff --git a/datahub-frontend/app/utils/ConfigUtil.java b/datahub-frontend/app/utils/ConfigUtil.java index b99a5e123b9eb9..5c80389c96da49 100644 --- a/datahub-frontend/app/utils/ConfigUtil.java +++ b/datahub-frontend/app/utils/ConfigUtil.java @@ -3,18 +3,16 @@ import com.linkedin.util.Configuration; import com.typesafe.config.Config; - public class ConfigUtil { - private ConfigUtil() { - - } + private ConfigUtil() {} // New configurations, provided via application.conf file. 
public static final String METADATA_SERVICE_HOST_CONFIG_PATH = "metadataService.host"; public static final String METADATA_SERVICE_PORT_CONFIG_PATH = "metadataService.port"; public static final String METADATA_SERVICE_USE_SSL_CONFIG_PATH = "metadataService.useSsl"; - public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = "metadataService.sslProtocol"; + public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = + "metadataService.sslProtocol"; // Legacy env-var based config values, for backwards compatibility: public static final String GMS_HOST_ENV_VAR = "DATAHUB_GMS_HOST"; @@ -27,10 +25,14 @@ private ConfigUtil() { public static final String DEFAULT_GMS_PORT = "8080"; public static final String DEFAULT_GMS_USE_SSL = "False"; - public static final String DEFAULT_METADATA_SERVICE_HOST = Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); - public static final Integer DEFAULT_METADATA_SERVICE_PORT = Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); - public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); - public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); + public static final String DEFAULT_METADATA_SERVICE_HOST = + Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); + public static final Integer DEFAULT_METADATA_SERVICE_PORT = + Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); + public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = + Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); + public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = + Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); public static boolean getBoolean(Config config, String key) { return config.hasPath(key) && 
config.getBoolean(key); diff --git a/datahub-frontend/app/utils/SearchUtil.java b/datahub-frontend/app/utils/SearchUtil.java index 2c52ff5b40156c..803c70a63646a0 100644 --- a/datahub-frontend/app/utils/SearchUtil.java +++ b/datahub-frontend/app/utils/SearchUtil.java @@ -2,29 +2,26 @@ import javax.annotation.Nonnull; - -/** - * Utility functions for Search - */ +/** Utility functions for Search */ public class SearchUtil { - private SearchUtil() { - //utility class - } + private SearchUtil() { + // utility class + } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - * - * @param input - * @return - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + * + * @param input + * @return + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } + return input; + } } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index fdf13bac0accc0..ab4ce405a55411 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -1,7 +1,7 @@ plugins { - id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" id 'scala' id 'com.palantir.docker' + id 'org.gradle.playframework' } apply from: "../gradle/versioning/versioning.gradle" @@ -20,7 +20,6 @@ model { } task myTar(type: Tar) { - extension = "tgz" compression = Compression.GZIP from("${buildDir}/stage") @@ -39,25 +38,6 @@ 
artifacts { archives myTar } -graphqlCodegen { - // For options: https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = ["$projectDir/conf/datahub-frontend.graphql".toString()] - outputDir = new File("$projectDir/app/graphql") - packageName = "generated" - generateApis = true - modelValidationAnnotation = "" - customTypesMapping = [ - Long: "Long", - ] -} - -tasks.withType(Checkstyle) { - exclude "**/generated/**" -} - -checkstyleMain.source = "app/" - - /* PLAY UPGRADE NOTE Generates the distribution jars under the expected names. The playFramework plugin only accepts certain name values @@ -77,10 +57,11 @@ docker { version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_dir}/Dockerfile") files fileTree(rootProject.projectDir) { + include '.dockerignore' include 'docker/monitoring/*' include "docker/${docker_dir}/*" }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") @@ -88,9 +69,25 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } -task unversionZip(type: Copy, dependsOn: [':datahub-web-react:build', dist]) { +task unversionZip(type: Copy, dependsOn: [':datahub-web-react:distZip', dist]) { from ("${buildDir}/distributions") include 
"datahub-frontend-${version}.zip" into "${buildDir}/docker/" @@ -103,4 +100,24 @@ task cleanLocalDockerImages { rootProject.ext.cleanLocalDockerImages(docker_registry, docker_repo, "${version}") } } -dockerClean.finalizedBy(cleanLocalDockerImages) \ No newline at end of file +dockerClean.finalizedBy(cleanLocalDockerImages) + +// gradle 8 fixes +tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist' +tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist' +stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +playBinaryDistZip.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist +tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts +playBinaryDistTar.dependsOn createMainStartScripts +playBinaryDistZip.dependsOn createMainStartScripts +createMainStartScripts.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageMainDist' +createPlayBinaryZipDist.dependsOn 'stageMainDist' diff --git a/datahub-frontend/conf/application.conf b/datahub-frontend/conf/application.conf index 1a62c8547e721f..63ff2c9166fbc9 100644 --- a/datahub-frontend/conf/application.conf +++ b/datahub-frontend/conf/application.conf @@ -38,8 +38,12 @@ jwt { play.server.provider = server.CustomAkkaHttpServerProvider play.http.server.akka.max-header-count = 64 
play.http.server.akka.max-header-count = ${?DATAHUB_AKKA_MAX_HEADER_COUNT} -play.server.akka.max-header-size = 8k +# max-header-size is reportedly no longer used +play.server.akka.max-header-size = 32k play.server.akka.max-header-size = ${?DATAHUB_AKKA_MAX_HEADER_VALUE_LENGTH} +# max header value length seems to impact the actual limit +play.server.akka.max-header-value-length = 32k +play.server.akka.max-header-value-length = ${?DATAHUB_AKKA_MAX_HEADER_VALUE_LENGTH} # Update AUTH_COOKIE_SAME_SITE and AUTH_COOKIE_SECURE in order to change how authentication cookies # are configured. If you wish cookies to be sent in first and third party contexts, set @@ -182,6 +186,8 @@ auth.oidc.customParam.resource = ${?AUTH_OIDC_CUSTOM_PARAM_RESOURCE} auth.oidc.readTimeout = ${?AUTH_OIDC_READ_TIMEOUT} auth.oidc.extractJwtAccessTokenClaims = ${?AUTH_OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS} # Whether to extract claims from JWT access token. Defaults to false. auth.oidc.preferredJwsAlgorithm = ${?AUTH_OIDC_PREFERRED_JWS_ALGORITHM} # Which jws algorithm to use +auth.oidc.acrValues = ${?AUTH_OIDC_ACR_VALUES} +auth.oidc.grantType = ${?AUTH_OIDC_GRANT_TYPE} # # By default, the callback URL that should be registered with the identity provider is computed as {$baseUrl}/callback/oidc. 
@@ -196,6 +202,10 @@ auth.oidc.preferredJwsAlgorithm = ${?AUTH_OIDC_PREFERRED_JWS_ALGORITHM} # Which # auth.jaas.enabled = ${?AUTH_JAAS_ENABLED} auth.native.enabled = ${?AUTH_NATIVE_ENABLED} + +# Enforces the usage of a valid email for user sign up +auth.native.signUp.enforceValidEmail = true +auth.native.signUp.enforceValidEmail = ${?ENFORCE_VALID_EMAIL} # # To disable all authentication to the app, and proxy all users through a master "datahub" account, make sure that, # jaas, native and oidc auth are disabled: @@ -284,4 +294,8 @@ systemClientSecret=${?DATAHUB_SYSTEM_CLIENT_SECRET} entityClient.retryInterval = 2 entityClient.retryInterval = ${?ENTITY_CLIENT_RETRY_INTERVAL} entityClient.numRetries = 3 -entityClient.numRetries = ${?ENTITY_CLIENT_NUM_RETRIES} \ No newline at end of file +entityClient.numRetries = ${?ENTITY_CLIENT_NUM_RETRIES} +entityClient.restli.get.batchSize = 50 +entityClient.restli.get.batchSize = ${?ENTITY_CLIENT_RESTLI_GET_BATCH_SIZE} +entityClient.restli.get.batchConcurrency = 2 +entityClient.restli.get.batchConcurrency = ${?ENTITY_CLIENT_RESTLI_GET_BATCH_CONCURRENCY} \ No newline at end of file diff --git a/datahub-frontend/conf/logback.xml b/datahub-frontend/conf/logback.xml index 2a542083e20a27..78da231b4a71c5 100644 --- a/datahub-frontend/conf/logback.xml +++ b/datahub-frontend/conf/logback.xml @@ -13,6 +13,7 @@ Unable to renew the session. The session store may not support this feature Preferred JWS algorithm: null not available. 
Using all metadata algorithms: + Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index 3102c26497fedd..9eac7aa34c3e37 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,11 +36,18 @@ PUT /openapi/*path c HEAD /openapi/*path controllers.Application.proxy(path: String, request: Request) PATCH /openapi/*path controllers.Application.proxy(path: String, request: Request) -# Map static resources from the /public folder to the /assets URL path -GET /assets/*file controllers.Assets.at(path="/public", file) # Analytics route POST /track controllers.TrackingController.track(request: Request) -# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle +# Map static resources from the /public folder to the /assets URL path +GET /assets/icons/favicon.ico controllers.RedirectController.favicon(request: Request) + +# Known React asset routes +GET /assets/*file controllers.Assets.at(path="/public/assets", file) +GET /node_modules/*file controllers.Assets.at(path="/public/node_modules", file) +GET /manifest.json controllers.Assets.at(path="/public", file="manifest.json") +GET /robots.txt controllers.Assets.at(path="/public", file="robots.txt") + +# Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle's index.html GET /*path controllers.Application.index(path) diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index daecba16cbf723..b14962e5900cd2 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -1,4 +1,3 @@ -apply plugin: "org.gradle.playframework" // Change this to listen on a different port project.ext.httpPort = 9001 @@ -58,6 +57,7 @@ dependencies { implementation externalDependency.shiroCore implementation externalDependency.playCache + implementation externalDependency.playCaffeineCache implementation 
externalDependency.playWs implementation externalDependency.playServer implementation externalDependency.playAkkaHttpServer @@ -76,7 +76,7 @@ dependencies { implementation externalDependency.slf4jApi compileOnly externalDependency.lombok - runtimeOnly externalDependency.guice + runtimeOnly externalDependency.guicePlay runtimeOnly (externalDependency.playDocs) { exclude group: 'com.typesafe.akka', module: 'akka-http-core_2.12' } @@ -90,7 +90,7 @@ dependencies { play { platform { - playVersion = '2.8.18' + playVersion = '2.8.21' scalaVersion = '2.12' javaVersion = JavaVersion.VERSION_11 } @@ -100,4 +100,25 @@ play { test { useJUnitPlatform() + + testLogging.showStandardStreams = true + testLogging.exceptionFormat = 'full' + + def playJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + //"--add-opens=java.base/java.net=ALL-UNNAMED", + //"--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = playJava17CompatibleJvmArgs } diff --git a/datahub-frontend/public b/datahub-frontend/public new file mode 120000 index 00000000000000..60c68c7b4b1bc3 --- /dev/null +++ b/datahub-frontend/public @@ -0,0 +1 @@ +../datahub-web-react/public \ No newline at end of file diff --git a/datahub-frontend/run/logback.xml b/datahub-frontend/run/logback.xml index 9cabd3c923aa2c..5d275c821e16f1 100644 --- 
a/datahub-frontend/run/logback.xml +++ b/datahub-frontend/run/logback.xml @@ -13,6 +13,7 @@ Unable to renew the session. The session store may not support this feature Preferred JWS algorithm: null not available. Using all metadata algorithms: + Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props diff --git a/datahub-frontend/run/run-local-frontend b/datahub-frontend/run/run-local-frontend index 93b5328c5e116a..1dc6e4ab3b3cbc 100755 --- a/datahub-frontend/run/run-local-frontend +++ b/datahub-frontend/run/run-local-frontend @@ -1,7 +1,7 @@ #!/bin/bash CURRENT_DIR=$(pwd) -BUILD_DIR=../build/stage/playBinary +BUILD_DIR=../build/stage/main CONF_DIR=$BUILD_DIR/conf set -a diff --git a/datahub-frontend/run/run-local-frontend-debug b/datahub-frontend/run/run-local-frontend-debug index 4d868d75647d8a..c071ef1ff9714f 100755 --- a/datahub-frontend/run/run-local-frontend-debug +++ b/datahub-frontend/run/run-local-frontend-debug @@ -1,7 +1,7 @@ #!/bin/bash CURRENT_DIR=$(pwd) -BUILD_DIR=../build/stage/playBinary +BUILD_DIR=../build/stage/main CONF_DIR=$BUILD_DIR/conf set -a diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java index f27fefdb796691..534cffb5cc7fe4 100644 --- a/datahub-frontend/test/app/ApplicationTest.java +++ b/datahub-frontend/test/app/ApplicationTest.java @@ -1,11 +1,22 @@ package app; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static play.mvc.Http.Status.NOT_FOUND; +import static play.mvc.Http.Status.OK; +import static play.test.Helpers.fakeRequest; +import static play.test.Helpers.route; + import com.nimbusds.jwt.JWT; import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.jwt.JWTParser; import controllers.routes; +import java.io.IOException; +import java.net.InetAddress; import java.text.ParseException; import java.util.Date; +import java.util.List; +import java.util.Map; import 
no.nav.security.mock.oauth2.MockOAuth2Server; import no.nav.security.mock.oauth2.token.DefaultOAuth2TokenCallback; import okhttp3.mockwebserver.MockResponse; @@ -26,22 +37,9 @@ import play.mvc.Http; import play.mvc.Result; import play.test.Helpers; - import play.test.TestBrowser; import play.test.WithBrowser; -import java.io.IOException; -import java.net.InetAddress; -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static play.mvc.Http.Status.NOT_FOUND; -import static play.mvc.Http.Status.OK; -import static play.test.Helpers.fakeRequest; -import static play.test.Helpers.route; - @TestInstance(TestInstance.Lifecycle.PER_CLASS) @SetEnvironmentVariable(key = "DATAHUB_SECRET", value = "test") @SetEnvironmentVariable(key = "KAFKA_BOOTSTRAP_SERVER", value = "") @@ -56,11 +54,15 @@ public class ApplicationTest extends WithBrowser { @Override protected Application provideApplication() { return new GuiceApplicationBuilder() - .configure("metadataService.port", String.valueOf(gmsServerPort())) - .configure("auth.baseUrl", "http://localhost:" + providePort()) - .configure("auth.oidc.discoveryUri", "http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration") - .in(new Environment(Mode.TEST)).build(); + .configure("metadataService.port", String.valueOf(gmsServerPort())) + .configure("auth.baseUrl", "http://localhost:" + providePort()) + .configure( + "auth.oidc.discoveryUri", + "http://localhost:" + + oauthServerPort() + + "/testIssuer/.well-known/openid-configuration") + .in(new Environment(Mode.TEST)) + .build(); } @Override @@ -89,17 +91,24 @@ public int gmsServerPort() { @BeforeAll public void init() throws IOException { _gmsServer = new MockWebServer(); + _gmsServer.enqueue(new MockResponse().setResponseCode(404)); // dynamic settings - not tested + _gmsServer.enqueue(new MockResponse().setResponseCode(404)); // 
dynamic settings - not tested + _gmsServer.enqueue(new MockResponse().setResponseCode(404)); // dynamic settings - not tested _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER))); - _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); + _gmsServer.enqueue( + new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); _gmsServer.start(gmsServerPort()); _oauthServer = new MockOAuth2Server(); _oauthServer.enqueueCallback( - new DefaultOAuth2TokenCallback(ISSUER_ID, "testUser", List.of(), Map.of( - "email", "testUser@myCompany.com", - "groups", "myGroup" - ), 600) - ); + new DefaultOAuth2TokenCallback( + ISSUER_ID, + "testUser", + List.of(), + Map.of( + "email", "testUser@myCompany.com", + "groups", "myGroup"), + 600)); _oauthServer.start(InetAddress.getByName("localhost"), oauthServerPort()); // Discovery url to authorization server metadata @@ -147,8 +156,9 @@ public void testIndexNotFound() { @Test public void testOpenIdConfig() { - assertEquals("http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration", _wellKnownUrl); + assertEquals( + "http://localhost:" + oauthServerPort() + "/testIssuer/.well-known/openid-configuration", + _wellKnownUrl); } @Test @@ -166,8 +176,13 @@ public void testHappyPathOidc() throws ParseException { Map data = (Map) claims.getClaim("data"); assertEquals(TEST_TOKEN, data.get("token")); assertEquals(TEST_USER, data.get("actor")); - // Default expiration is 24h, so should always be less than current time + 1 day since it stamps the time before this executes - assertTrue(claims.getExpirationTime().compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) < 0); + // Default expiration is 24h, so should always be less than current time + 1 day since it stamps + // the time before this executes + assertTrue( + claims + .getExpirationTime() + .compareTo(new 
Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) + < 0); } @Test @@ -180,8 +195,27 @@ public void testAPI() throws ParseException { } @Test - public void testOidcRedirectToRequestedUrl() throws InterruptedException { + public void testOidcRedirectToRequestedUrl() { browser.goTo("/authenticate?redirect_uri=%2Fcontainer%2Furn%3Ali%3Acontainer%3ADATABASE"); assertEquals("container/urn:li:container:DATABASE", browser.url()); } + + /** + * The Redirect Uri parameter is used to store a previous relative location within the app to be able to + * take a user back to their expected page. Redirecting to other domains should be blocked. + */ + @Test + public void testInvalidRedirectUrl() { + browser.goTo("/authenticate?redirect_uri=https%3A%2F%2Fwww.google.com"); + assertEquals("", browser.url()); + + browser.goTo("/authenticate?redirect_uri=file%3A%2F%2FmyFile"); + assertEquals("", browser.url()); + + browser.goTo("/authenticate?redirect_uri=ftp%3A%2F%2FsomeFtp"); + assertEquals("", browser.url()); + + browser.goTo("/authenticate?redirect_uri=localhost%3A9002%2Flogin"); + assertEquals("", browser.url()); + } } diff --git a/datahub-frontend/test/oidc/OidcCallbackLogicTest.java b/datahub-frontend/test/oidc/OidcCallbackLogicTest.java new file mode 100644 index 00000000000000..f4784c29e91f2e --- /dev/null +++ b/datahub-frontend/test/oidc/OidcCallbackLogicTest.java @@ -0,0 +1,64 @@ +package oidc; + +import auth.sso.oidc.OidcConfigs; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; + +import static auth.sso.oidc.OidcCallbackLogic.getGroupNames; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.pac4j.core.profile.CommonProfile; + +public class OidcCallbackLogicTest { + + @Test + public void testGetGroupsClaimNamesJsonArray() { + 
CommonProfile profile = createMockProfileWithAttribute("[\"group1\", \"group2\"]", "groupsClaimName"); + Collection result = getGroupNames(profile, "[\"group1\", \"group2\"]", "groupsClaimName"); + assertEquals(Arrays.asList("group1", "group2"), result); + } + @Test + public void testGetGroupNamesWithSingleGroup() { + CommonProfile profile = createMockProfileWithAttribute("group1", "groupsClaimName"); + Collection result = getGroupNames(profile, "group1", "groupsClaimName"); + assertEquals(Arrays.asList("group1"), result); + } + + @Test + public void testGetGroupNamesWithCommaSeparated() { + CommonProfile profile = createMockProfileWithAttribute("group1,group2", "groupsClaimName"); + Collection result = getGroupNames(profile, "group1,group2", "groupsClaimName"); + assertEquals(Arrays.asList("group1", "group2"), result); + } + + @Test + public void testGetGroupNamesWithCollection() { + CommonProfile profile = createMockProfileWithAttribute(Arrays.asList("group1", "group2"), "groupsClaimName"); + Collection result = getGroupNames(profile, Arrays.asList("group1", "group2"), "groupsClaimName"); + assertEquals(Arrays.asList("group1", "group2"), result); + } + // Helper method to create a mock CommonProfile with given attribute + private CommonProfile createMockProfileWithAttribute(Object attribute, String attributeName) { + CommonProfile profile = mock(CommonProfile.class); + + // Mock for getAttribute(String) + when(profile.getAttribute(attributeName)).thenReturn(attribute); + + // Mock for getAttribute(String, Class) + if (attribute instanceof Collection) { + when(profile.getAttribute(attributeName, Collection.class)).thenReturn((Collection) attribute); + } else if (attribute instanceof String) { + when(profile.getAttribute(attributeName, String.class)).thenReturn((String) attribute); + } + // Add more conditions here if needed for other types + + return profile; + } +} diff --git a/datahub-web-react/public/logo.png 
b/datahub-frontend/test/resources/public/logos/datahub-logo.png similarity index 100% rename from datahub-web-react/public/logo.png rename to datahub-frontend/test/resources/public/logos/datahub-logo.png diff --git a/datahub-frontend/test/security/DummyLoginModuleTest.java b/datahub-frontend/test/security/DummyLoginModuleTest.java index 6727513d884af1..9bf2b5dd4d11c0 100644 --- a/datahub-frontend/test/security/DummyLoginModuleTest.java +++ b/datahub-frontend/test/security/DummyLoginModuleTest.java @@ -1,14 +1,12 @@ package security; -import com.sun.security.auth.callback.TextCallbackHandler; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import com.sun.security.auth.callback.TextCallbackHandler; import java.util.HashMap; import javax.security.auth.Subject; import javax.security.auth.login.LoginException; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class DummyLoginModuleTest { diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index ed16014b58e595..1c52d45af5f9e0 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,9 @@ package security; +import static auth.AuthUtils.*; +import static auth.sso.oidc.OidcConfigs.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import auth.sso.oidc.OidcConfigs; import auth.sso.oidc.OidcProvider; import com.typesafe.config.Config; @@ -19,303 +23,322 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; - -import static auth.sso.oidc.OidcConfigs.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - +import org.json.JSONObject; public class OidcConfigurationTest { - private static final com.typesafe.config.Config CONFIG = new Config() { - 
- private final Map _map = new HashMap<>(); - - @Override - public ConfigObject root() { - return null; - } - - @Override - public ConfigOrigin origin() { - return null; - } - - @Override - public Config withFallback(ConfigMergeable other) { - return null; - } - - @Override - public Config resolve() { - return null; - } - - @Override - public Config resolve(ConfigResolveOptions options) { - return null; - } - - @Override - public boolean isResolved() { - return false; - } - - @Override - public Config resolveWith(Config source) { - return null; - } - - @Override - public Config resolveWith(Config source, ConfigResolveOptions options) { - return null; - } - - @Override - public void checkValid(Config reference, String... restrictToPaths) { - - } - - @Override - public boolean hasPath(String path) { - return true; - } - - @Override - public boolean hasPathOrNull(String path) { - return false; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public Set> entrySet() { - return null; - } - - @Override - public boolean getIsNull(String path) { - return false; - } - - @Override - public boolean getBoolean(String path) { - return false; - } - - @Override - public Number getNumber(String path) { - return null; - } - - @Override - public int getInt(String path) { - return 0; - } - - @Override - public long getLong(String path) { - return 0; - } - - @Override - public double getDouble(String path) { - return 0; - } - - @Override - public String getString(String path) { - return (String) _map.getOrDefault(path, "1"); - } - - @Override - public > T getEnum(Class enumClass, String path) { - return null; - } - - @Override - public ConfigObject getObject(String path) { - return null; - } - - @Override - public Config getConfig(String path) { - return null; - } - - @Override - public Object getAnyRef(String path) { - return null; - } - - @Override - public ConfigValue getValue(String path) { - return null; - } - - @Override - public Long 
getBytes(String path) { - return null; - } - - @Override - public ConfigMemorySize getMemorySize(String path) { - return null; - } - - @Override - public Long getMilliseconds(String path) { - return null; - } - - @Override - public Long getNanoseconds(String path) { - return null; - } - - @Override - public long getDuration(String path, TimeUnit unit) { - return 0; - } - - @Override - public Duration getDuration(String path) { - return null; - } - - @Override - public Period getPeriod(String path) { - return null; - } - - @Override - public TemporalAmount getTemporal(String path) { - return null; - } - - @Override - public ConfigList getList(String path) { - return null; - } - - @Override - public List getBooleanList(String path) { - return null; - } - - @Override - public List getNumberList(String path) { - return null; - } - - @Override - public List getIntList(String path) { - return null; - } - - @Override - public List getLongList(String path) { - return null; - } - - @Override - public List getDoubleList(String path) { - return null; - } - - @Override - public List getStringList(String path) { - return null; - } - - @Override - public > List getEnumList(Class enumClass, String path) { - return null; - } - - @Override - public List getObjectList(String path) { - return null; - } - - @Override - public List getConfigList(String path) { - return null; - } - - @Override - public List getAnyRefList(String path) { - return null; - } - - @Override - public List getBytesList(String path) { - return null; - } - - @Override - public List getMemorySizeList(String path) { - return null; - } - - @Override - public List getMillisecondsList(String path) { - return null; - } - - @Override - public List getNanosecondsList(String path) { - return null; - } - - @Override - public List getDurationList(String path, TimeUnit unit) { - return null; - } - - @Override - public List getDurationList(String path) { - return null; - } - - @Override - public Config withOnlyPath(String 
path) { - return null; - } - - @Override - public Config withoutPath(String path) { - return null; - } - - @Override - public Config atPath(String path) { - return null; - } - - @Override - public Config atKey(String key) { - return null; - } - - @Override - public Config withValue(String path, ConfigValue value) { - _map.put(path, value.unwrapped()); - return this; - } - }; + private static final com.typesafe.config.Config CONFIG = + new Config() { + + private final Map _map = new HashMap<>(); + + @Override + public ConfigObject root() { + return null; + } + + @Override + public ConfigOrigin origin() { + return null; + } + + @Override + public Config withFallback(ConfigMergeable other) { + return null; + } + + @Override + public Config resolve() { + return null; + } + + @Override + public Config resolve(ConfigResolveOptions options) { + return null; + } + + @Override + public boolean isResolved() { + return false; + } + + @Override + public Config resolveWith(Config source) { + return null; + } + + @Override + public Config resolveWith(Config source, ConfigResolveOptions options) { + return null; + } + + @Override + public void checkValid(Config reference, String... 
restrictToPaths) {} + + @Override + public boolean hasPath(String path) { + return true; + } + + @Override + public boolean hasPathOrNull(String path) { + return false; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public Set> entrySet() { + return null; + } + + @Override + public boolean getIsNull(String path) { + return false; + } + + @Override + public boolean getBoolean(String path) { + return false; + } + + @Override + public Number getNumber(String path) { + return null; + } + + @Override + public int getInt(String path) { + return 0; + } + + @Override + public long getLong(String path) { + return 0; + } + + @Override + public double getDouble(String path) { + return 0; + } + + @Override + public String getString(String path) { + return (String) _map.getOrDefault(path, "1"); + } + + @Override + public > T getEnum(Class enumClass, String path) { + return null; + } + + @Override + public ConfigObject getObject(String path) { + return null; + } + + @Override + public Config getConfig(String path) { + return null; + } + + @Override + public Object getAnyRef(String path) { + return null; + } + + @Override + public ConfigValue getValue(String path) { + return null; + } + + @Override + public Long getBytes(String path) { + return null; + } + + @Override + public ConfigMemorySize getMemorySize(String path) { + return null; + } + + @Override + public Long getMilliseconds(String path) { + return null; + } + + @Override + public Long getNanoseconds(String path) { + return null; + } + + @Override + public long getDuration(String path, TimeUnit unit) { + return 0; + } + + @Override + public Duration getDuration(String path) { + return null; + } + + @Override + public Period getPeriod(String path) { + return null; + } + + @Override + public TemporalAmount getTemporal(String path) { + return null; + } + + @Override + public ConfigList getList(String path) { + return null; + } + + @Override + public List getBooleanList(String path) { + 
return null; + } + + @Override + public List getNumberList(String path) { + return null; + } + + @Override + public List getIntList(String path) { + return null; + } + + @Override + public List getLongList(String path) { + return null; + } + + @Override + public List getDoubleList(String path) { + return null; + } + + @Override + public List getStringList(String path) { + return null; + } + + @Override + public > List getEnumList(Class enumClass, String path) { + return null; + } + + @Override + public List getObjectList(String path) { + return null; + } + + @Override + public List getConfigList(String path) { + return null; + } + + @Override + public List getAnyRefList(String path) { + return null; + } + + @Override + public List getBytesList(String path) { + return null; + } + + @Override + public List getMemorySizeList(String path) { + return null; + } + + @Override + public List getMillisecondsList(String path) { + return null; + } + + @Override + public List getNanosecondsList(String path) { + return null; + } + + @Override + public List getDurationList(String path, TimeUnit unit) { + return null; + } + + @Override + public List getDurationList(String path) { + return null; + } + + @Override + public Config withOnlyPath(String path) { + return null; + } + + @Override + public Config withoutPath(String path) { + return null; + } + + @Override + public Config atPath(String path) { + return null; + } + + @Override + public Config atKey(String key) { + return null; + } + + @Override + public Config withValue(String path, ConfigValue value) { + _map.put(path, value.unwrapped()); + return this; + } + }; @Test public void readTimeoutPropagation() { CONFIG.withValue(OIDC_READ_TIMEOUT, ConfigValueFactory.fromAnyRef("10000")); - OidcConfigs oidcConfigs = new OidcConfigs(CONFIG); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG); + OidcConfigs oidcConfigs = oidcConfigsBuilder.build(); OidcProvider oidcProvider = new 
OidcProvider(oidcConfigs); assertEquals(10000, ((OidcClient) oidcProvider.client()).getConfiguration().getReadTimeout()); } + + @Test + public void readPreferredJwsAlgorithmPropagationFromConfig() { + final String SSO_SETTINGS_JSON_STR = new JSONObject().toString(); + CONFIG.withValue(OIDC_PREFERRED_JWS_ALGORITHM, ConfigValueFactory.fromAnyRef("RS256")); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG, SSO_SETTINGS_JSON_STR); + OidcConfigs oidcConfigs = new OidcConfigs(oidcConfigsBuilder); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + assertEquals("RS256", ((OidcClient) oidcProvider.client()).getConfiguration().getPreferredJwsAlgorithm().toString()); + } + + @Test + public void readPreferredJwsAlgorithmPropagationFromJSON() { + final String SSO_SETTINGS_JSON_STR = new JSONObject().put(PREFERRED_JWS_ALGORITHM, "HS256").toString(); + CONFIG.withValue(OIDC_PREFERRED_JWS_ALGORITHM, ConfigValueFactory.fromAnyRef("RS256")); + OidcConfigs.Builder oidcConfigsBuilder = new OidcConfigs.Builder(); + oidcConfigsBuilder.from(CONFIG, SSO_SETTINGS_JSON_STR); + OidcConfigs oidcConfigs = new OidcConfigs(oidcConfigsBuilder); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + assertEquals("HS256", ((OidcClient) oidcProvider.client()).getConfiguration().getPreferredJwsAlgorithm().toString()); + } } diff --git a/datahub-frontend/test/utils/SearchUtilTest.java b/datahub-frontend/test/utils/SearchUtilTest.java index 428566ae3f4247..6767fa56374692 100644 --- a/datahub-frontend/test/utils/SearchUtilTest.java +++ b/datahub-frontend/test/utils/SearchUtilTest.java @@ -1,17 +1,18 @@ package utils; -import org.junit.jupiter.api.Test; - import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; + public class SearchUtilTest { - @Test - public void testEscapeForwardSlash() { - // escape "/" - assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); - // "/" 
is escaped but "*" is not escaped and is treated as regex. Since currently we want to retain the regex behaviour with "*" - assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); - assertEquals("", ""); - assertEquals("foo", "foo"); - } + @Test + public void testEscapeForwardSlash() { + // escape "/" + assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); + // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to + // retain the regex behaviour with "*" + assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); + assertEquals("", ""); + assertEquals("foo", "foo"); + } } diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index fba0031351b588..49a7fa7fbfbc2f 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -1,16 +1,18 @@ plugins { + id 'java' id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" } -apply plugin: 'java' + dependencies { - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-service:auth-impl') implementation project(':metadata-service:auth-config') implementation project(':metadata-service:configuration') implementation project(':metadata-service:services') implementation project(':metadata-io') implementation project(':metadata-utils') + implementation project(':metadata-models') implementation externalDependency.graphqlJava implementation externalDependency.graphqlJavaScalars @@ -20,6 +22,7 @@ dependencies { implementation externalDependency.opentelemetryAnnotations implementation externalDependency.slf4jApi + implementation externalDependency.springContext compileOnly externalDependency.lombok annotationProcessor externalDependency.lombok @@ -29,27 +32,16 @@ dependencies { graphqlCodegen { // For options: 
https://github.com/kobylynskyi/graphql-java-codegen/blob/master/docs/codegen-options.md - graphqlSchemaPaths = [ - "$projectDir/src/main/resources/entity.graphql".toString(), - "$projectDir/src/main/resources/app.graphql".toString(), - "$projectDir/src/main/resources/search.graphql".toString(), - "$projectDir/src/main/resources/analytics.graphql".toString(), - "$projectDir/src/main/resources/recommendation.graphql".toString(), - "$projectDir/src/main/resources/ingestion.graphql".toString(), - "$projectDir/src/main/resources/auth.graphql".toString(), - "$projectDir/src/main/resources/timeline.graphql".toString(), - "$projectDir/src/main/resources/tests.graphql".toString(), - "$projectDir/src/main/resources/step.graphql".toString(), - "$projectDir/src/main/resources/lineage.graphql".toString(), - ] - outputDir = new File("$projectDir/src/mainGeneratedGraphQL/java") + graphqlSchemaPaths = fileTree(dir: "${projectDir}/src/main/resources", include: '**/*.graphql').collect { it.absolutePath } + outputDir = new File("${projectDir}/src/mainGeneratedGraphQL/java") packageName = "com.linkedin.datahub.graphql.generated" + generateToString = true generateApis = true generateParameterizedFieldsResolvers = false modelValidationAnnotation = "@javax.annotation.Nonnull" customTypesMapping = [ - Long: "Long", - Float: "Float" + Long: "Long", + Float: "Float" ] } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 4488f27c19d808..69306862a46ef7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -1,29 +1,42 @@ package com.linkedin.datahub.graphql; -/** - * Constants relating to GraphQL type system & execution. 
- */ -public class Constants { +import com.google.common.collect.ImmutableSet; +import java.util.Set; - private Constants() { }; +/** Constants relating to GraphQL type system & execution. */ +public class Constants { - public static final String URN_FIELD_NAME = "urn"; - public static final String URNS_FIELD_NAME = "urns"; - public static final String GMS_SCHEMA_FILE = "entity.graphql"; - public static final String SEARCH_SCHEMA_FILE = "search.graphql"; - public static final String APP_SCHEMA_FILE = "app.graphql"; - public static final String AUTH_SCHEMA_FILE = "auth.graphql"; - public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; - public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; - public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; - public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; - public static final String TESTS_SCHEMA_FILE = "tests.graphql"; - public static final String STEPS_SCHEMA_FILE = "step.graphql"; - public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; - public static final String BROWSE_PATH_DELIMITER = "/"; - public static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; + private Constants() {} - public static final String ENTITY_FILTER_NAME = "_entityType"; + public static final String URN_FIELD_NAME = "urn"; + public static final String URNS_FIELD_NAME = "urns"; + public static final String GMS_SCHEMA_FILE = "entity.graphql"; + public static final String SEARCH_SCHEMA_FILE = "search.graphql"; + public static final String APP_SCHEMA_FILE = "app.graphql"; + public static final String AUTH_SCHEMA_FILE = "auth.graphql"; + public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; + public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; + public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; + public static final 
String TIMELINE_SCHEMA_FILE = "timeline.graphql"; + public static final String TESTS_SCHEMA_FILE = "tests.graphql"; + public static final String STEPS_SCHEMA_FILE = "step.graphql"; + public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String PROPERTIES_SCHEMA_FILE = "properties.graphql"; + public static final String FORMS_SCHEMA_FILE = "forms.graphql"; + public static final String ASSERTIONS_SCHEMA_FILE = "assertions.graphql"; + public static final String COMMON_SCHEMA_FILE = "common.graphql"; + public static final String INCIDENTS_SCHEMA_FILE = "incident.graphql"; + public static final String CONTRACTS_SCHEMA_FILE = "contract.graphql"; + public static final String CONNECTIONS_SCHEMA_FILE = "connection.graphql"; + public static final String BROWSE_PATH_DELIMITER = "/"; + public static final String BROWSE_PATH_V2_DELIMITER = "␟"; + public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; + public static final String ENTITY_FILTER_NAME = "_entityType"; + public static final Set DEFAULT_PERSONA_URNS = + ImmutableSet.of( + "urn:li:dataHubPersona:technicalUser", + "urn:li:dataHubPersona:businessUser", + "urn:li:dataHubPersona:dataLeader", + "urn:li:dataHubPersona:dataSteward"); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index b99f712034fe03..db9bf304a1085b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; +import static graphql.scalars.ExtendedScalars.*; + import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.group.GroupService; import 
com.datahub.authentication.invite.InviteTokenService; @@ -18,6 +22,7 @@ import com.linkedin.datahub.graphql.analytics.resolver.GetMetadataAnalyticsResolver; import com.linkedin.datahub.graphql.analytics.resolver.IsAnalyticsEnabledResolver; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.AccessToken; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; @@ -29,12 +34,15 @@ import com.linkedin.datahub.graphql.generated.BrowsePathEntry; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.BusinessAttributeAssociation; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.ChartInfo; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import com.linkedin.datahub.graphql.generated.CorpUserViewsSettings; import com.linkedin.datahub.graphql.generated.Dashboard; @@ -42,33 +50,42 @@ import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; import com.linkedin.datahub.graphql.generated.DashboardUserUsageCounts; import com.linkedin.datahub.graphql.generated.DataFlow; +import com.linkedin.datahub.graphql.generated.DataHubConnection; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataJob; import com.linkedin.datahub.graphql.generated.DataJobInputOutput; 
+import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; +import com.linkedin.datahub.graphql.generated.DataQualityContract; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; +import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.generated.Domain; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.EntityRelationship; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import com.linkedin.datahub.graphql.generated.FreshnessContract; import com.linkedin.datahub.graphql.generated.GetRootGlossaryNodesResult; import com.linkedin.datahub.graphql.generated.GetRootGlossaryTermsResult; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.generated.GlossaryTermAssociation; +import com.linkedin.datahub.graphql.generated.IncidentSource; import com.linkedin.datahub.graphql.generated.IngestionSource; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import com.linkedin.datahub.graphql.generated.LineageRelationship; import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; +import com.linkedin.datahub.graphql.generated.ListBusinessAttributesResult; import com.linkedin.datahub.graphql.generated.ListDomainsResult; import com.linkedin.datahub.graphql.generated.ListGroupsResult; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult; import com.linkedin.datahub.graphql.generated.ListQueriesResult; 
import com.linkedin.datahub.graphql.generated.ListTestsResult; import com.linkedin.datahub.graphql.generated.ListViewsResult; -import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; import com.linkedin.datahub.graphql.generated.MLFeatureTable; @@ -78,48 +95,71 @@ import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; +import com.linkedin.datahub.graphql.generated.MatchedField; +import com.linkedin.datahub.graphql.generated.MetadataAttribution; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.Owner; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.ParentDomainsResult; import com.linkedin.datahub.graphql.generated.PolicyMatchCriterionValue; import com.linkedin.datahub.graphql.generated.QueryEntity; +import com.linkedin.datahub.graphql.generated.QueryProperties; import com.linkedin.datahub.graphql.generated.QuerySubject; import com.linkedin.datahub.graphql.generated.QuickFilter; import com.linkedin.datahub.graphql.generated.RecommendationContent; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; +import com.linkedin.datahub.graphql.generated.SchemaContract; +import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SiblingProperties; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import 
com.linkedin.datahub.graphql.generated.StructuredPropertyParams; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.TestResult; +import com.linkedin.datahub.graphql.generated.TypeQualifier; import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.resolvers.MeResolver; import com.linkedin.datahub.graphql.resolvers.assertion.AssertionRunEventResolver; import com.linkedin.datahub.graphql.resolvers.assertion.DeleteAssertionResolver; import com.linkedin.datahub.graphql.resolvers.assertion.EntityAssertionsResolver; +import com.linkedin.datahub.graphql.resolvers.assertion.ReportAssertionResultResolver; +import com.linkedin.datahub.graphql.resolvers.assertion.UpsertCustomAssertionResolver; import com.linkedin.datahub.graphql.resolvers.auth.CreateAccessTokenResolver; +import com.linkedin.datahub.graphql.resolvers.auth.DebugAccessResolver; +import com.linkedin.datahub.graphql.resolvers.auth.GetAccessTokenMetadataResolver; import com.linkedin.datahub.graphql.resolvers.auth.GetAccessTokenResolver; import com.linkedin.datahub.graphql.resolvers.auth.ListAccessTokensResolver; import com.linkedin.datahub.graphql.resolvers.auth.RevokeAccessTokenResolver; import com.linkedin.datahub.graphql.resolvers.browse.BrowsePathsResolver; import com.linkedin.datahub.graphql.resolvers.browse.BrowseResolver; import com.linkedin.datahub.graphql.resolvers.browse.EntityBrowsePathsResolver; +import com.linkedin.datahub.graphql.resolvers.businessattribute.AddBusinessAttributeResolver; +import com.linkedin.datahub.graphql.resolvers.businessattribute.CreateBusinessAttributeResolver; +import com.linkedin.datahub.graphql.resolvers.businessattribute.DeleteBusinessAttributeResolver; +import com.linkedin.datahub.graphql.resolvers.businessattribute.ListBusinessAttributesResolver; +import com.linkedin.datahub.graphql.resolvers.businessattribute.RemoveBusinessAttributeResolver; +import 
com.linkedin.datahub.graphql.resolvers.businessattribute.UpdateBusinessAttributeResolver; import com.linkedin.datahub.graphql.resolvers.chart.BrowseV2Resolver; import com.linkedin.datahub.graphql.resolvers.chart.ChartStatsSummaryResolver; import com.linkedin.datahub.graphql.resolvers.config.AppConfigResolver; +import com.linkedin.datahub.graphql.resolvers.connection.UpsertConnectionResolver; import com.linkedin.datahub.graphql.resolvers.container.ContainerEntitiesResolver; import com.linkedin.datahub.graphql.resolvers.container.ParentContainersResolver; import com.linkedin.datahub.graphql.resolvers.dashboard.DashboardStatsSummaryResolver; import com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsResolver; +import com.linkedin.datahub.graphql.resolvers.datacontract.EntityDataContractResolver; +import com.linkedin.datahub.graphql.resolvers.datacontract.UpsertDataContractResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.BatchSetDataProductResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.CreateDataProductResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.DeleteDataProductResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.ListDataProductAssetsResolver; import com.linkedin.datahub.graphql.resolvers.dataproduct.UpdateDataProductResolver; -import com.linkedin.datahub.graphql.resolvers.dataset.DatasetHealthResolver; import com.linkedin.datahub.graphql.resolvers.dataset.DatasetStatsSummaryResolver; import com.linkedin.datahub.graphql.resolvers.dataset.DatasetUsageStatsResolver; +import com.linkedin.datahub.graphql.resolvers.dataset.IsAssignedToMeResolver; import com.linkedin.datahub.graphql.resolvers.deprecation.UpdateDeprecationResolver; import com.linkedin.datahub.graphql.resolvers.domain.CreateDomainResolver; import com.linkedin.datahub.graphql.resolvers.domain.DeleteDomainResolver; @@ -131,6 +171,15 @@ import com.linkedin.datahub.graphql.resolvers.embed.UpdateEmbedResolver; import 
com.linkedin.datahub.graphql.resolvers.entity.EntityExistsResolver; import com.linkedin.datahub.graphql.resolvers.entity.EntityPrivilegesResolver; +import com.linkedin.datahub.graphql.resolvers.form.BatchAssignFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.BatchRemoveFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.CreateDynamicFormAssignmentResolver; +import com.linkedin.datahub.graphql.resolvers.form.CreateFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.DeleteFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.IsFormAssignedToMeResolver; +import com.linkedin.datahub.graphql.resolvers.form.SubmitFormPromptResolver; +import com.linkedin.datahub.graphql.resolvers.form.UpdateFormResolver; +import com.linkedin.datahub.graphql.resolvers.form.VerifyFormResolver; import com.linkedin.datahub.graphql.resolvers.glossary.AddRelatedTermsResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryNodeResolver; import com.linkedin.datahub.graphql.resolvers.glossary.CreateGlossaryTermResolver; @@ -145,6 +194,10 @@ import com.linkedin.datahub.graphql.resolvers.group.ListGroupsResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupMembersResolver; import com.linkedin.datahub.graphql.resolvers.group.RemoveGroupResolver; +import com.linkedin.datahub.graphql.resolvers.health.EntityHealthResolver; +import com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver; +import com.linkedin.datahub.graphql.resolvers.incident.RaiseIncidentResolver; +import com.linkedin.datahub.graphql.resolvers.incident.UpdateIncidentStatusResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CancelIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateIngestionExecutionRequestResolver; import com.linkedin.datahub.graphql.resolvers.ingest.execution.CreateTestConnectionRequestResolver; @@ -155,6 +208,7 @@ import 
com.linkedin.datahub.graphql.resolvers.ingest.secret.DeleteSecretResolver; import com.linkedin.datahub.graphql.resolvers.ingest.secret.GetSecretValuesResolver; import com.linkedin.datahub.graphql.resolvers.ingest.secret.ListSecretsResolver; +import com.linkedin.datahub.graphql.resolvers.ingest.secret.UpdateSecretResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.DeleteIngestionSourceResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.GetIngestionSourceResolver; import com.linkedin.datahub.graphql.resolvers.ingest.source.ListIngestionSourcesResolver; @@ -211,6 +265,7 @@ import com.linkedin.datahub.graphql.resolvers.post.CreatePostResolver; import com.linkedin.datahub.graphql.resolvers.post.DeletePostResolver; import com.linkedin.datahub.graphql.resolvers.post.ListPostsResolver; +import com.linkedin.datahub.graphql.resolvers.post.UpdatePostResolver; import com.linkedin.datahub.graphql.resolvers.query.CreateQueryResolver; import com.linkedin.datahub.graphql.resolvers.query.DeleteQueryResolver; import com.linkedin.datahub.graphql.resolvers.query.ListQueriesResolver; @@ -230,11 +285,17 @@ import com.linkedin.datahub.graphql.resolvers.search.SearchAcrossEntitiesResolver; import com.linkedin.datahub.graphql.resolvers.search.SearchAcrossLineageResolver; import com.linkedin.datahub.graphql.resolvers.search.SearchResolver; +import com.linkedin.datahub.graphql.resolvers.settings.docPropagation.DocPropagationSettingsResolver; +import com.linkedin.datahub.graphql.resolvers.settings.docPropagation.UpdateDocPropagationSettingsResolver; import com.linkedin.datahub.graphql.resolvers.settings.user.UpdateCorpUserViewsSettingsResolver; import com.linkedin.datahub.graphql.resolvers.settings.view.GlobalViewsSettingsResolver; import com.linkedin.datahub.graphql.resolvers.settings.view.UpdateGlobalViewsSettingsResolver; import com.linkedin.datahub.graphql.resolvers.step.BatchGetStepStatesResolver; import 
com.linkedin.datahub.graphql.resolvers.step.BatchUpdateStepStatesResolver; +import com.linkedin.datahub.graphql.resolvers.structuredproperties.CreateStructuredPropertyResolver; +import com.linkedin.datahub.graphql.resolvers.structuredproperties.RemoveStructuredPropertiesResolver; +import com.linkedin.datahub.graphql.resolvers.structuredproperties.UpdateStructuredPropertyResolver; +import com.linkedin.datahub.graphql.resolvers.structuredproperties.UpsertStructuredPropertiesResolver; import com.linkedin.datahub.graphql.resolvers.tag.CreateTagResolver; import com.linkedin.datahub.graphql.resolvers.tag.DeleteTagResolver; import com.linkedin.datahub.graphql.resolvers.tag.SetTagColorResolver; @@ -249,6 +310,7 @@ import com.linkedin.datahub.graphql.resolvers.type.EntityInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver; +import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver; import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver; import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver; import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver; @@ -267,9 +329,11 @@ import com.linkedin.datahub.graphql.types.aspect.AspectType; import com.linkedin.datahub.graphql.types.assertion.AssertionType; import com.linkedin.datahub.graphql.types.auth.AccessTokenMetadataType; +import com.linkedin.datahub.graphql.types.businessattribute.BusinessAttributeType; import com.linkedin.datahub.graphql.types.chart.ChartType; import com.linkedin.datahub.graphql.types.common.mappers.OperationMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.connection.DataHubConnectionType; import com.linkedin.datahub.graphql.types.container.ContainerType; import 
com.linkedin.datahub.graphql.types.corpgroup.CorpGroupType; import com.linkedin.datahub.graphql.types.corpuser.CorpUserType; @@ -283,10 +347,16 @@ import com.linkedin.datahub.graphql.types.dataset.DatasetType; import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType; import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper; +import com.linkedin.datahub.graphql.types.datatype.DataTypeType; import com.linkedin.datahub.graphql.types.domain.DomainType; -import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeType; +import com.linkedin.datahub.graphql.types.ermodelrelationship.CreateERModelRelationshipResolver; +import com.linkedin.datahub.graphql.types.ermodelrelationship.ERModelRelationshipType; +import com.linkedin.datahub.graphql.types.ermodelrelationship.UpdateERModelRelationshipResolver; +import com.linkedin.datahub.graphql.types.form.FormType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; +import com.linkedin.datahub.graphql.types.incident.IncidentType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureType; import com.linkedin.datahub.graphql.types.mlmodel.MLModelGroupType; @@ -296,19 +366,24 @@ import com.linkedin.datahub.graphql.types.ownership.OwnershipType; import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType; import com.linkedin.datahub.graphql.types.query.QueryType; +import com.linkedin.datahub.graphql.types.restricted.RestrictedType; import com.linkedin.datahub.graphql.types.role.DataHubRoleType; +import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyType; import 
com.linkedin.datahub.graphql.types.tag.TagType; import com.linkedin.datahub.graphql.types.test.TestType; import com.linkedin.datahub.graphql.types.view.DataHubViewType; import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.client.UsageStatsJavaClient; import com.linkedin.metadata.config.DataHubConfiguration; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.config.TestsConfiguration; import com.linkedin.metadata.config.ViewsConfiguration; import com.linkedin.metadata.config.VisualConfiguration; import com.linkedin.metadata.config.telemetry.TelemetryConfiguration; +import com.linkedin.metadata.connection.ConnectionService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.graph.SiblingGraphService; @@ -316,8 +391,11 @@ import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.service.AssertionService; +import com.linkedin.metadata.service.BusinessAttributeService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.ERModelRelationshipService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -326,12 +404,13 @@ import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.version.GitVersion; -import com.linkedin.usage.UsageClient; import graphql.execution.DataFetcherResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import 
graphql.schema.StaticDataFetcher; import graphql.schema.idl.RuntimeWiring; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -341,7 +420,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -352,1496 +430,2781 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; -import static graphql.scalars.ExtendedScalars.*; - - /** - * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph. + * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the GMS + * graph. 
*/ @Slf4j @Getter public class GmsGraphQLEngine { - private final EntityClient entityClient; - private final SystemEntityClient systemEntityClient; - private final GraphClient graphClient; - private final UsageClient usageClient; - private final SiblingGraphService siblingGraphService; - - private final EntityService entityService; - private final AnalyticsService analyticsService; - private final RecommendationsService recommendationsService; - private final EntityRegistry entityRegistry; - private final StatefulTokenService statefulTokenService; - private final SecretService secretService; - private final GitVersion gitVersion; - private final boolean supportsImpactAnalysis; - private final TimeseriesAspectService timeseriesAspectService; - private final TimelineService timelineService; - private final NativeUserService nativeUserService; - private final GroupService groupService; - private final RoleService roleService; - private final InviteTokenService inviteTokenService; - private final PostService postService; - private final SettingsService settingsService; - private final ViewService viewService; - private final OwnershipTypeService ownershipTypeService; - private final LineageService lineageService; - private final QueryService queryService; - private final DataProductService dataProductService; - - private final FeatureFlags featureFlags; - - private final IngestionConfiguration ingestionConfiguration; - private final AuthenticationConfiguration authenticationConfiguration; - private final AuthorizationConfiguration authorizationConfiguration; - private final VisualConfiguration visualConfiguration; - private final TelemetryConfiguration telemetryConfiguration; - private final TestsConfiguration testsConfiguration; - private final DataHubConfiguration datahubConfiguration; - private final ViewsConfiguration viewsConfiguration; - - private final DatasetType datasetType; - - private final RoleType roleType; - - private final CorpUserType corpUserType; - 
private final CorpGroupType corpGroupType; - private final ChartType chartType; - private final DashboardType dashboardType; - private final DataPlatformType dataPlatformType; - private final TagType tagType; - private final MLModelType mlModelType; - private final MLModelGroupType mlModelGroupType; - private final MLFeatureType mlFeatureType; - private final MLFeatureTableType mlFeatureTableType; - private final MLPrimaryKeyType mlPrimaryKeyType; - private final DataFlowType dataFlowType; - private final DataJobType dataJobType; - private final GlossaryTermType glossaryTermType; - private final GlossaryNodeType glossaryNodeType; - private final AspectType aspectType; - private final ContainerType containerType; - private final DomainType domainType; - private final NotebookType notebookType; - private final AssertionType assertionType; - private final VersionedDatasetType versionedDatasetType; - private final DataPlatformInstanceType dataPlatformInstanceType; - private final AccessTokenMetadataType accessTokenMetadataType; - private final TestType testType; - private final DataHubPolicyType dataHubPolicyType; - private final DataHubRoleType dataHubRoleType; - private final SchemaFieldType schemaFieldType; - private final DataHubViewType dataHubViewType; - private final QueryType queryType; - private final DataProductType dataProductType; - private final OwnershipType ownershipType; - - /** - * A list of GraphQL Plugins that extend the core engine - */ - private final List graphQLPlugins; - - /** - * Configures the graph objects that can be fetched primary key. - */ - public final List> entityTypes; - - /** - * Configures all graph objects - */ - public final List> loadableTypes; - - /** - * Configures the graph objects for owner - */ - public final List> ownerTypes; - - /** - * Configures the graph objects that can be searched. - */ - public final List> searchableTypes; - - /** - * Configures the graph objects that can be browsed. 
- */ - public final List> browsableTypes; - - public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { - - this.graphQLPlugins = List.of( + private final EntityClient entityClient; + private final SystemEntityClient systemEntityClient; + private final GraphClient graphClient; + private final UsageStatsJavaClient usageClient; + private final SiblingGraphService siblingGraphService; + + private final EntityService entityService; + private final AnalyticsService analyticsService; + private final RecommendationsService recommendationsService; + private final EntityRegistry entityRegistry; + private final StatefulTokenService statefulTokenService; + private final SecretService secretService; + private final GitVersion gitVersion; + private final boolean supportsImpactAnalysis; + private final TimeseriesAspectService timeseriesAspectService; + private final TimelineService timelineService; + private final NativeUserService nativeUserService; + private final GroupService groupService; + private final RoleService roleService; + private final InviteTokenService inviteTokenService; + private final PostService postService; + private final SettingsService settingsService; + private final ViewService viewService; + private final OwnershipTypeService ownershipTypeService; + private final LineageService lineageService; + private final QueryService queryService; + private final DataProductService dataProductService; + private final ERModelRelationshipService erModelRelationshipService; + private final FormService formService; + private final RestrictedService restrictedService; + private ConnectionService connectionService; + private AssertionService assertionService; + + private final BusinessAttributeService businessAttributeService; + private final FeatureFlags featureFlags; + + private final IngestionConfiguration ingestionConfiguration; + private final AuthenticationConfiguration authenticationConfiguration; + private final AuthorizationConfiguration 
authorizationConfiguration; + private final VisualConfiguration visualConfiguration; + private final TelemetryConfiguration telemetryConfiguration; + private final TestsConfiguration testsConfiguration; + private final DataHubConfiguration datahubConfiguration; + private final ViewsConfiguration viewsConfiguration; + + private final DatasetType datasetType; + + private final RoleType roleType; + + private final CorpUserType corpUserType; + private final CorpGroupType corpGroupType; + private final ChartType chartType; + private final DashboardType dashboardType; + private final DataPlatformType dataPlatformType; + private final TagType tagType; + private final MLModelType mlModelType; + private final MLModelGroupType mlModelGroupType; + private final MLFeatureType mlFeatureType; + private final MLFeatureTableType mlFeatureTableType; + private final MLPrimaryKeyType mlPrimaryKeyType; + private final DataFlowType dataFlowType; + private final DataJobType dataJobType; + private final GlossaryTermType glossaryTermType; + private final GlossaryNodeType glossaryNodeType; + private final AspectType aspectType; + private final DataHubConnectionType connectionType; + private final ContainerType containerType; + private final DomainType domainType; + private final NotebookType notebookType; + private final AssertionType assertionType; + private final VersionedDatasetType versionedDatasetType; + private final DataPlatformInstanceType dataPlatformInstanceType; + private final AccessTokenMetadataType accessTokenMetadataType; + private final TestType testType; + private final DataHubPolicyType dataHubPolicyType; + private final DataHubRoleType dataHubRoleType; + private final SchemaFieldType schemaFieldType; + private final ERModelRelationshipType erModelRelationshipType; + private final DataHubViewType dataHubViewType; + private final QueryType queryType; + private final DataProductType dataProductType; + private final OwnershipType ownershipType; + private final 
StructuredPropertyType structuredPropertyType; + private final DataTypeType dataTypeType; + private final EntityTypeType entityTypeType; + private final FormType formType; + private final IncidentType incidentType; + private final RestrictedType restrictedType; + + private final int graphQLQueryComplexityLimit; + private final int graphQLQueryDepthLimit; + private final boolean graphQLQueryIntrospectionEnabled; + + private final BusinessAttributeType businessAttributeType; + + /** A list of GraphQL Plugins that extend the core engine */ + private final List graphQLPlugins; + + /** Configures the graph objects that can be fetched primary key. */ + public final List> entityTypes; + + /** Configures all graph objects */ + public final List> loadableTypes; + + /** Configures the graph objects for owner */ + public final List> ownerTypes; + + /** Configures the graph objects that can be searched. */ + public final List> searchableTypes; + + /** Configures the graph objects that can be browsed. 
*/ + public final List> browsableTypes; + + public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { + + this.graphQLPlugins = + List.of( // Add new plugins here - ); - - this.graphQLPlugins.forEach(plugin -> plugin.init(args)); - - this.entityClient = args.entityClient; - this.systemEntityClient = args.systemEntityClient; - this.graphClient = args.graphClient; - this.usageClient = args.usageClient; - this.siblingGraphService = args.siblingGraphService; - - this.analyticsService = args.analyticsService; - this.entityService = args.entityService; - this.recommendationsService = args.recommendationsService; - this.statefulTokenService = args.statefulTokenService; - this.secretService = args.secretService; - this.entityRegistry = args.entityRegistry; - this.gitVersion = args.gitVersion; - this.supportsImpactAnalysis = args.supportsImpactAnalysis; - this.timeseriesAspectService = args.timeseriesAspectService; - this.timelineService = args.timelineService; - this.nativeUserService = args.nativeUserService; - this.groupService = args.groupService; - this.roleService = args.roleService; - this.inviteTokenService = args.inviteTokenService; - this.postService = args.postService; - this.viewService = args.viewService; - this.ownershipTypeService = args.ownershipTypeService; - this.settingsService = args.settingsService; - this.lineageService = args.lineageService; - this.queryService = args.queryService; - this.dataProductService = args.dataProductService; - - this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); - this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); - this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration); - this.visualConfiguration = args.visualConfiguration; - this.telemetryConfiguration = args.telemetryConfiguration; - this.testsConfiguration = args.testsConfiguration; - this.datahubConfiguration = args.datahubConfiguration; - this.viewsConfiguration 
= args.viewsConfiguration; - this.featureFlags = args.featureFlags; - - this.datasetType = new DatasetType(entityClient); - this.roleType = new RoleType(entityClient); - this.corpUserType = new CorpUserType(entityClient, featureFlags); - this.corpGroupType = new CorpGroupType(entityClient); - this.chartType = new ChartType(entityClient); - this.dashboardType = new DashboardType(entityClient); - this.dataPlatformType = new DataPlatformType(entityClient); - this.tagType = new TagType(entityClient); - this.mlModelType = new MLModelType(entityClient); - this.mlModelGroupType = new MLModelGroupType(entityClient); - this.mlFeatureType = new MLFeatureType(entityClient); - this.mlFeatureTableType = new MLFeatureTableType(entityClient); - this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); - this.dataFlowType = new DataFlowType(entityClient); - this.dataJobType = new DataJobType(entityClient); - this.glossaryTermType = new GlossaryTermType(entityClient); - this.glossaryNodeType = new GlossaryNodeType(entityClient); - this.aspectType = new AspectType(entityClient); - this.containerType = new ContainerType(entityClient); - this.domainType = new DomainType(entityClient); - this.notebookType = new NotebookType(entityClient); - this.assertionType = new AssertionType(entityClient); - this.versionedDatasetType = new VersionedDatasetType(entityClient); - this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); - this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); - this.testType = new TestType(entityClient); - this.dataHubPolicyType = new DataHubPolicyType(entityClient); - this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); - this.dataHubViewType = new DataHubViewType(entityClient); - this.queryType = new QueryType(entityClient); - this.dataProductType = new DataProductType(entityClient); - this.ownershipType = new OwnershipType(entityClient); - - // Init Lists - this.entityTypes 
= ImmutableList.of( - datasetType, - roleType, - corpUserType, - corpGroupType, - dataPlatformType, - chartType, - dashboardType, - tagType, - mlModelType, - mlModelGroupType, - mlFeatureType, - mlFeatureTableType, - mlPrimaryKeyType, - dataFlowType, - dataJobType, - glossaryTermType, - glossaryNodeType, - containerType, - notebookType, - domainType, - assertionType, - versionedDatasetType, - dataPlatformInstanceType, - accessTokenMetadataType, - testType, - dataHubPolicyType, - dataHubRoleType, - schemaFieldType, - dataHubViewType, - queryType, - dataProductType, - ownershipType - ); - this.loadableTypes = new ArrayList<>(entityTypes); - // Extend loadable types with types from the plugins - // This allows us to offer search and browse capabilities out of the box for those types - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - Collection> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - this.loadableTypes.addAll(pluginLoadableTypes); - } - } - this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); - this.searchableTypes = loadableTypes.stream() + ); + + this.graphQLPlugins.forEach(plugin -> plugin.init(args)); + + this.entityClient = args.entityClient; + this.systemEntityClient = args.systemEntityClient; + this.graphClient = args.graphClient; + this.usageClient = args.usageClient; + this.siblingGraphService = args.siblingGraphService; + + this.analyticsService = args.analyticsService; + this.entityService = args.entityService; + this.recommendationsService = args.recommendationsService; + this.statefulTokenService = args.statefulTokenService; + this.secretService = args.secretService; + this.entityRegistry = args.entityRegistry; + this.gitVersion = args.gitVersion; + this.supportsImpactAnalysis = args.supportsImpactAnalysis; + this.timeseriesAspectService = args.timeseriesAspectService; + this.timelineService = args.timelineService; + this.nativeUserService = args.nativeUserService; + this.groupService = 
args.groupService; + this.roleService = args.roleService; + this.inviteTokenService = args.inviteTokenService; + this.postService = args.postService; + this.viewService = args.viewService; + this.ownershipTypeService = args.ownershipTypeService; + this.settingsService = args.settingsService; + this.lineageService = args.lineageService; + this.queryService = args.queryService; + this.erModelRelationshipService = args.erModelRelationshipService; + this.dataProductService = args.dataProductService; + this.formService = args.formService; + this.restrictedService = args.restrictedService; + this.connectionService = args.connectionService; + this.assertionService = args.assertionService; + + this.businessAttributeService = args.businessAttributeService; + this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); + this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); + this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration); + this.visualConfiguration = args.visualConfiguration; + this.telemetryConfiguration = args.telemetryConfiguration; + this.testsConfiguration = args.testsConfiguration; + this.datahubConfiguration = args.datahubConfiguration; + this.viewsConfiguration = args.viewsConfiguration; + this.featureFlags = args.featureFlags; + + this.datasetType = new DatasetType(entityClient); + this.roleType = new RoleType(entityClient); + this.corpUserType = new CorpUserType(entityClient, featureFlags); + this.corpGroupType = new CorpGroupType(entityClient); + this.chartType = new ChartType(entityClient); + this.dashboardType = new DashboardType(entityClient); + this.dataPlatformType = new DataPlatformType(entityClient); + this.tagType = new TagType(entityClient); + this.mlModelType = new MLModelType(entityClient); + this.mlModelGroupType = new MLModelGroupType(entityClient); + this.mlFeatureType = new MLFeatureType(entityClient); + this.mlFeatureTableType = new 
MLFeatureTableType(entityClient); + this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); + this.dataFlowType = new DataFlowType(entityClient); + this.dataJobType = new DataJobType(entityClient); + this.glossaryTermType = new GlossaryTermType(entityClient); + this.glossaryNodeType = new GlossaryNodeType(entityClient); + this.aspectType = new AspectType(entityClient); + this.connectionType = new DataHubConnectionType(entityClient, secretService); + this.containerType = new ContainerType(entityClient); + this.domainType = new DomainType(entityClient); + this.notebookType = new NotebookType(entityClient); + this.assertionType = new AssertionType(entityClient); + this.versionedDatasetType = new VersionedDatasetType(entityClient); + this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); + this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); + this.testType = new TestType(entityClient); + this.dataHubPolicyType = new DataHubPolicyType(entityClient); + this.dataHubRoleType = new DataHubRoleType(entityClient); + this.schemaFieldType = new SchemaFieldType(entityClient, featureFlags); + this.erModelRelationshipType = new ERModelRelationshipType(entityClient, featureFlags); + this.dataHubViewType = new DataHubViewType(entityClient); + this.queryType = new QueryType(entityClient); + this.dataProductType = new DataProductType(entityClient); + this.ownershipType = new OwnershipType(entityClient); + this.structuredPropertyType = new StructuredPropertyType(entityClient); + this.dataTypeType = new DataTypeType(entityClient); + this.entityTypeType = new EntityTypeType(entityClient); + this.formType = new FormType(entityClient); + this.incidentType = new IncidentType(entityClient); + this.restrictedType = new RestrictedType(entityClient, restrictedService); + + this.graphQLQueryComplexityLimit = args.graphQLQueryComplexityLimit; + this.graphQLQueryDepthLimit = args.graphQLQueryDepthLimit; + this.graphQLQueryIntrospectionEnabled = 
args.graphQLQueryIntrospectionEnabled; + + this.businessAttributeType = new BusinessAttributeType(entityClient); + // Init Lists + this.entityTypes = + new ArrayList<>( + ImmutableList.of( + datasetType, + roleType, + corpUserType, + corpGroupType, + dataPlatformType, + chartType, + dashboardType, + tagType, + mlModelType, + mlModelGroupType, + mlFeatureType, + mlFeatureTableType, + mlPrimaryKeyType, + dataFlowType, + dataJobType, + glossaryTermType, + glossaryNodeType, + connectionType, + containerType, + notebookType, + domainType, + assertionType, + versionedDatasetType, + dataPlatformInstanceType, + accessTokenMetadataType, + testType, + dataHubPolicyType, + dataHubRoleType, + schemaFieldType, + erModelRelationshipType, + dataHubViewType, + queryType, + dataProductType, + ownershipType, + structuredPropertyType, + dataTypeType, + entityTypeType, + formType, + incidentType, + restrictedType, + businessAttributeType)); + this.loadableTypes = new ArrayList<>(entityTypes); + // Extend loadable types with types from the plugins + // This allows us to offer search and browse capabilities out of the box for + // those types + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + this.entityTypes.addAll(plugin.getEntityTypes()); + Collection> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + this.loadableTypes.addAll(pluginLoadableTypes); + } + } + this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); + this.searchableTypes = + loadableTypes.stream() .filter(type -> (type instanceof SearchableEntityType)) .map(type -> (SearchableEntityType) type) .collect(Collectors.toList()); - this.browsableTypes = loadableTypes.stream() + this.browsableTypes = + loadableTypes.stream() .filter(type -> (type instanceof BrowsableEntityType)) .map(type -> (BrowsableEntityType) type) .collect(Collectors.toList()); - } + } - /** - * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from - * a {@link 
LoadableType}. - */ - public Map>> loaderSuppliers(final Collection> loadableTypes) { - return loadableTypes - .stream() - .collect(Collectors.toMap( + /** + * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link + * LoadableType}. + */ + public Map>> loaderSuppliers( + final Collection> loadableTypes) { + return loadableTypes.stream() + .collect( + Collectors.toMap( LoadableType::name, - (graphType) -> (context) -> createDataLoader(graphType, context) - )); - } - - /** - * Final call to wire up any extra resolvers the plugin might want to add on - * @param builder - */ - private void configurePluginResolvers(final RuntimeWiring.Builder builder) { - this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); - } - + (graphType) -> (context) -> createDataLoader(graphType, context))); + } - public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { - configureQueryResolvers(builder); - configureMutationResolvers(builder); - configureGenericEntityResolvers(builder); - configureDatasetResolvers(builder); - configureCorpUserResolvers(builder); - configureCorpGroupResolvers(builder); - configureDashboardResolvers(builder); - configureNotebookResolvers(builder); - configureChartResolvers(builder); - configureTypeResolvers(builder); - configureTypeExtensions(builder); - configureTagAssociationResolver(builder); - configureGlossaryTermAssociationResolver(builder); - configureDataJobResolvers(builder); - configureDataFlowResolvers(builder); - configureMLFeatureTableResolvers(builder); - configureGlossaryRelationshipResolvers(builder); - configureIngestionSourceResolvers(builder); - configureAnalyticsResolvers(builder); - configureContainerResolvers(builder); - configureDataPlatformInstanceResolvers(builder); - configureGlossaryTermResolvers(builder); - configureOrganisationRoleResolvers(builder); - configureGlossaryNodeResolvers(builder); - configureDomainResolvers(builder); - 
configureDataProductResolvers(builder); - configureAssertionResolvers(builder); - configurePolicyResolvers(builder); - configureDataProcessInstanceResolvers(builder); - configureVersionedDatasetResolvers(builder); - configureAccessAccessTokenMetadataResolvers(builder); - configureTestResultResolvers(builder); - configureRoleResolvers(builder); - configureSchemaFieldResolvers(builder); - configureEntityPathResolvers(builder); - configureViewResolvers(builder); - configureQueryEntityResolvers(builder); - configureOwnershipTypeResolver(builder); - configurePluginResolvers(builder); - } - - private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { - builder.type("Role", typeWiring -> typeWiring + /** + * Final call to wire up any extra resolvers the plugin might want to add on + * + * @param builder + */ + private void configurePluginResolvers(final RuntimeWiring.Builder builder) { + this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); + } + + public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { + configureQueryResolvers(builder); + configureMutationResolvers(builder); + configureGenericEntityResolvers(builder); + configureDatasetResolvers(builder); + configureCorpUserResolvers(builder); + configureCorpGroupResolvers(builder); + configureDashboardResolvers(builder); + configureNotebookResolvers(builder); + configureChartResolvers(builder); + configureTypeResolvers(builder); + configureTypeExtensions(builder); + configureTagAssociationResolver(builder); + configureGlossaryTermAssociationResolver(builder); + configureDataJobResolvers(builder); + configureDataFlowResolvers(builder); + configureMLFeatureTableResolvers(builder); + configureGlossaryRelationshipResolvers(builder); + configureIngestionSourceResolvers(builder); + configureAnalyticsResolvers(builder); + configureContainerResolvers(builder); + configureDataPlatformInstanceResolvers(builder); + configureGlossaryTermResolvers(builder); + 
configureOrganisationRoleResolvers(builder); + configureGlossaryNodeResolvers(builder); + configureDomainResolvers(builder); + configureDataProductResolvers(builder); + configureAssertionResolvers(builder); + configureContractResolvers(builder); + configurePolicyResolvers(builder); + configureDataProcessInstanceResolvers(builder); + configureVersionedDatasetResolvers(builder); + configureAccessAccessTokenMetadataResolvers(builder); + configureTestResultResolvers(builder); + configureDataHubRoleResolvers(builder); + configureSchemaFieldResolvers(builder); + configureERModelRelationshipResolvers(builder); + configureEntityPathResolvers(builder); + configureResolvedAuditStampResolvers(builder); + configureViewResolvers(builder); + configureQueryEntityResolvers(builder); + configureOwnershipTypeResolver(builder); + configurePluginResolvers(builder); + configureStructuredPropertyResolvers(builder); + configureFormResolvers(builder); + configureIncidentResolvers(builder); + configureRestrictedResolvers(builder); + configureRoleResolvers(builder); + configureBusinessAttributeResolver(builder); + configureBusinessAttributeAssociationResolver(builder); + configureConnectionResolvers(builder); + configureDeprecationResolvers(builder); + configureMetadataAttributionResolver(builder); + } + + private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { + builder.type( + "Role", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("RoleAssociation", typeWiring -> typeWiring - .dataFetcher("role", - new LoadableTypeResolver<>(roleType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleAssociation) - env.getSource()).getRole().getUrn())) - ); - builder.type("RoleUser", typeWiring -> typeWiring - .dataFetcher("user", - new LoadableTypeResolver<>(corpUserType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleUser) - env.getSource()).getUser().getUrn())) - ); - } - - public 
GraphQLEngine.Builder builder() { - final GraphQLEngine.Builder builder = GraphQLEngine.builder(); - builder - .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) - .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); - - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - List pluginSchemaFiles = plugin.getSchemaFiles(); - if (pluginSchemaFiles != null) { - pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); - } - Collection> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - pluginLoadableTypes.forEach(loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); - } - } - builder - .addDataLoaders(loaderSuppliers(loadableTypes)) - .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) - .configureRuntimeWiring(this::configureRuntimeWiring); - return builder; + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); + builder.type( + "RoleAssociation", + typeWiring -> + typeWiring.dataFetcher( + "role", + new LoadableTypeResolver<>( + roleType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleAssociation) env.getSource()) + .getRole() + .getUrn()))); + builder.type( + "RoleUser", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleUser) env.getSource()) + .getUser() + .getUrn()))); + } + + public GraphQLEngine.Builder builder() { + final GraphQLEngine.Builder builder = 
GraphQLEngine.builder(); + builder + .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) + .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(PROPERTIES_SCHEMA_FILE)) + .addSchema(fileBasedSchema(FORMS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(CONNECTIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(ASSERTIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INCIDENTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(CONTRACTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(COMMON_SCHEMA_FILE)); + + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + List pluginSchemaFiles = plugin.getSchemaFiles(); + if (pluginSchemaFiles != null) { + pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); + } + Collection> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + pluginLoadableTypes.forEach( + loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); + } } - - public static String fileBasedSchema(String fileName) { - String schema; - try { - InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); - schema = IOUtils.toString(is, StandardCharsets.UTF_8); - is.close(); - } catch (IOException e) { - throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); - } - return schema; + builder + .addDataLoaders(loaderSuppliers(loadableTypes)) + .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) + 
.setGraphQLQueryComplexityLimit(graphQLQueryComplexityLimit) + .setGraphQLQueryDepthLimit(graphQLQueryDepthLimit) + .setGraphQLQueryIntrospectionEnabled(graphQLQueryIntrospectionEnabled) + .configureRuntimeWiring(this::configureRuntimeWiring); + return builder; + } + + public static String fileBasedSchema(String fileName) { + String schema; + try { + InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); + schema = IOUtils.toString(is, StandardCharsets.UTF_8); + is.close(); + } catch (IOException e) { + throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); } - - private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { - final boolean isAnalyticsEnabled = analyticsService != null; - builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) - .type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); - if (isAnalyticsEnabled) { - builder.type("Query", typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts", - new GetChartsResolver(analyticsService, entityClient)) - .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) - .dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient))); - } + return schema; + } + + private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { + final boolean isAnalyticsEnabled = analyticsService != null; + builder + .type( + "Query", + typeWiring -> + typeWiring.dataFetcher( + "isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) + .type( + "AnalyticsChart", + typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); + if (isAnalyticsEnabled) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "getAnalyticsCharts", new GetChartsResolver(analyticsService, entityClient)) + .dataFetcher("getHighlights", 
new GetHighlightsResolver(analyticsService)) + .dataFetcher( + "getMetadataAnalyticsCharts", + new GetMetadataAnalyticsResolver(entityClient))); } + } - private void configureContainerResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Container", typeWiring -> typeWiring + private void configureContainerResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Container", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, (env) -> ((Container) env.getSource()).getPlatform().getUrn())) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Container container = env.getSource(); - return container.getContainer() != null ? container.getContainer().getUrn() : null; - }) - ) + final Container container = env.getSource(); + return container.getContainer() != null + ? container.getContainer().getUrn() + : null; + })) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Container container = env.getSource(); - return container.getDataPlatformInstance() != null ? 
container.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataPlatformInstance", typeWiring -> typeWiring - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn())) - ); - } - - private void configureQueryResolvers(final RuntimeWiring.Builder builder) { - builder.type("Query", typeWiring -> typeWiring - .dataFetcher("appConfig", - new AppConfigResolver(gitVersion, analyticsService != null, - this.ingestionConfiguration, - this.authenticationConfiguration, - this.authorizationConfiguration, - this.supportsImpactAnalysis, - this.visualConfiguration, - this.telemetryConfiguration, - this.testsConfiguration, - this.datahubConfiguration, - this.viewsConfiguration, - this.featureFlags - )) - .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) - .dataFetcher("search", new SearchResolver(this.entityClient)) - .dataFetcher("searchAcrossEntities", new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) - .dataFetcher("scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) - .dataFetcher("aggregateAcrossEntities", new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) - .dataFetcher("autoCompleteForMultiple", new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) - .dataFetcher("browse", new BrowseResolver(browsableTypes)) - .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) - .dataFetcher("dataset", getResolver(datasetType)) - .dataFetcher("role", getResolver(roleType)) - 
.dataFetcher("versionedDataset", getResolver(versionedDatasetType, - (env) -> new VersionedUrn().setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) - .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) - .dataFetcher("notebook", getResolver(notebookType)) - .dataFetcher("corpUser", getResolver(corpUserType)) - .dataFetcher("corpGroup", getResolver(corpGroupType)) - .dataFetcher("dashboard", getResolver(dashboardType)) - .dataFetcher("chart", getResolver(chartType)) - .dataFetcher("tag", getResolver(tagType)) - .dataFetcher("dataFlow", getResolver(dataFlowType)) - .dataFetcher("dataJob", getResolver(dataJobType)) - .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) - .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) - .dataFetcher("domain", getResolver((domainType))) - .dataFetcher("dataPlatform", getResolver(dataPlatformType)) - .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) - .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) - .dataFetcher("mlFeature", getResolver(mlFeatureType)) - .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) - .dataFetcher("mlModel", getResolver(mlModelType)) - .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) - .dataFetcher("assertion", getResolver(assertionType)) - .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) - .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) - .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) - .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) - .dataFetcher("listRecommendations", new ListRecommendationsResolver(recommendationsService)) - .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) - .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) - .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) - .dataFetcher("container", getResolver(containerType)) - 
.dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) - .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) - .dataFetcher("getSecretValues", new GetSecretValuesResolver(this.entityClient, this.secretService)) - .dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) - .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) - .dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) - .dataFetcher("getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) - .dataFetcher("test", getResolver(testType)) - .dataFetcher("listTests", new ListTestsResolver(entityClient)) - .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) - .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) - .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) - .dataFetcher("entity", getEntityResolver()) - .dataFetcher("entities", getEntitiesResolver()) - .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) - .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) - .dataFetcher("batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) - .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) - .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) - .dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) - .dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService)) - .dataFetcher("dataProduct", getResolver(dataProductType)) - 
.dataFetcher("listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) - .dataFetcher("browseV2", new BrowseV2Resolver(this.entityClient, this.viewService)) - ); - } - - private DataFetcher getEntitiesResolver() { - return new BatchGetEntitiesResolver(entityTypes, - (env) -> { - List urns = env.getArgument(URNS_FIELD_NAME); - return urns.stream().map((urn) -> { + final Container container = env.getSource(); + return container.getDataPlatformInstance() != null + ? container.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataPlatformInstance", + typeWiring -> + typeWiring.dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn()))); + } + + private void configureQueryResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "appConfig", + new AppConfigResolver( + gitVersion, + analyticsService != null, + this.ingestionConfiguration, + this.authenticationConfiguration, + this.authorizationConfiguration, + this.supportsImpactAnalysis, + this.visualConfiguration, + this.telemetryConfiguration, + this.testsConfiguration, + this.datahubConfiguration, + this.viewsConfiguration, + this.featureFlags)) + .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) + .dataFetcher("search", new SearchResolver(this.entityClient)) + .dataFetcher( + "searchAcrossEntities", + new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "scrollAcrossEntities", + new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "searchAcrossLineage", + new SearchAcrossLineageResolver(this.entityClient, this.entityRegistry)) + .dataFetcher( + 
"scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "aggregateAcrossEntities", + new AggregateAcrossEntitiesResolver( + this.entityClient, this.viewService, this.formService)) + .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) + .dataFetcher( + "autoCompleteForMultiple", + new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) + .dataFetcher("browse", new BrowseResolver(browsableTypes)) + .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) + .dataFetcher("dataset", getResolver(datasetType)) + .dataFetcher("role", getResolver(roleType)) + .dataFetcher( + "versionedDataset", + getResolver( + versionedDatasetType, + (env) -> + new VersionedUrn() + .setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) + .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) + .dataFetcher("notebook", getResolver(notebookType)) + .dataFetcher("corpUser", getResolver(corpUserType)) + .dataFetcher("corpGroup", getResolver(corpGroupType)) + .dataFetcher("dashboard", getResolver(dashboardType)) + .dataFetcher("chart", getResolver(chartType)) + .dataFetcher("tag", getResolver(tagType)) + .dataFetcher("dataFlow", getResolver(dataFlowType)) + .dataFetcher("dataJob", getResolver(dataJobType)) + .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) + .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) + .dataFetcher("domain", getResolver((domainType))) + .dataFetcher("erModelRelationship", getResolver(erModelRelationshipType)) + .dataFetcher("dataPlatform", getResolver(dataPlatformType)) + .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) + .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) + .dataFetcher("mlFeature", getResolver(mlFeatureType)) + .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) + .dataFetcher("mlModel", getResolver(mlModelType)) + .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) + 
.dataFetcher("assertion", getResolver(assertionType)) + .dataFetcher("form", getResolver(formType)) + .dataFetcher("view", getResolver(dataHubViewType)) + .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) + .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) + .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) + .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) + .dataFetcher( + "listRecommendations", + new ListRecommendationsResolver(recommendationsService, viewService)) + .dataFetcher( + "getEntityCounts", new EntityCountsResolver(this.entityClient, viewService)) + .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) + .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) + .dataFetcher( + "getAccessTokenMetadata", + new GetAccessTokenMetadataResolver(statefulTokenService, this.entityClient)) + .dataFetcher("debugAccess", new DebugAccessResolver(this.entityClient, graphClient)) + .dataFetcher("container", getResolver(containerType)) + .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) + .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) + .dataFetcher( + "getSecretValues", + new GetSecretValuesResolver(this.entityClient, this.secretService)) + .dataFetcher( + "listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) + .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) + .dataFetcher( + "getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) + .dataFetcher("test", getResolver(testType)) + .dataFetcher("listTests", new ListTestsResolver(entityClient)) + .dataFetcher( + "getRootGlossaryTerms", new 
GetRootGlossaryTermsResolver(this.entityClient)) + .dataFetcher( + "getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) + .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) + .dataFetcher("entity", getEntityResolver()) + .dataFetcher("entities", getEntitiesResolver()) + .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) + .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) + .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) + .dataFetcher( + "batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) + .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) + .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) + .dataFetcher( + "globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) + .dataFetcher( + "getQuickFilters", + new GetQuickFiltersResolver(this.entityClient, this.viewService)) + .dataFetcher("dataProduct", getResolver(dataProductType)) + .dataFetcher( + "listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) + .dataFetcher( + "browseV2", + new BrowseV2Resolver(this.entityClient, this.viewService, this.formService)) + .dataFetcher("businessAttribute", getResolver(businessAttributeType)) + .dataFetcher( + "listBusinessAttributes", new ListBusinessAttributesResolver(this.entityClient)) + .dataFetcher( + "docPropagationSettings", + new DocPropagationSettingsResolver(this.settingsService))); + } + + private DataFetcher getEntitiesResolver() { + return new BatchGetEntitiesResolver( + entityTypes, + (env) -> { + final QueryContext context = env.getContext(); + List urns = env.getArgument(URNS_FIELD_NAME); + return urns.stream() + .map(UrnUtils::getUrn) + .map( + (urn) -> { try { - Urn 
entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + return UrnToEntityMapper.map(context, urn); } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); + throw new RuntimeException("Failed to get entity", e); } - }).collect(Collectors.toList()); - }); - } + }) + .collect(Collectors.toList()); + }); + } + + private DataFetcher getEntityResolver() { + return new EntityTypeResolver( + entityTypes, + (env) -> { + try { + final QueryContext context = env.getContext(); + Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); + return UrnToEntityMapper.map(context, urn); + } catch (Exception e) { + throw new RuntimeException("Failed to get entity", e); + } + }); + } + + private DataFetcher getResolver(LoadableType loadableType) { + return getResolver(loadableType, this::getUrnField); + } + + private DataFetcher getResolver( + LoadableType loadableType, Function keyProvider) { + return new LoadableTypeResolver<>(loadableType, keyProvider); + } + + private String getUrnField(DataFetchingEnvironment env) { + return env.getArgument(URN_FIELD_NAME); + } + + private void configureMutationResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Mutation", + typeWiring -> { + typeWiring + .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) + .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) + .dataFetcher( + "createTag", new CreateTagResolver(this.entityClient, this.entityService)) + .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) + .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) + .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) + .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) + .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) + .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) + .dataFetcher("updateDataJob", new 
MutableTypeResolver<>(dataJobType)) + .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) + .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) + .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) + .dataFetcher( + "updateERModelRelationship", + new UpdateERModelRelationshipResolver(this.entityClient)) + .dataFetcher( + "createERModelRelationship", + new CreateERModelRelationshipResolver( + this.entityClient, this.erModelRelationshipService)) + .dataFetcher("addTag", new AddTagResolver(entityService)) + .dataFetcher("addTags", new AddTagsResolver(entityService)) + .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) + .dataFetcher("removeTag", new RemoveTagResolver(entityService)) + .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) + .dataFetcher("addTerm", new AddTermResolver(entityService)) + .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) + .dataFetcher("addTerms", new AddTermsResolver(entityService)) + .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) + .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) + .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) + .dataFetcher( + "updateDescription", + new UpdateDescriptionResolver(entityService, this.entityClient)) + .dataFetcher("addOwner", new AddOwnerResolver(entityService)) + .dataFetcher("addOwners", new AddOwnersResolver(entityService)) + .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) + .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) + .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) + .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) + 
.dataFetcher("removeLink", new RemoveLinkResolver(entityService)) + .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) + .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) + .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) + .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) + .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) + .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) + .dataFetcher( + "createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) + .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) + .dataFetcher( + "setDomain", new SetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) + .dataFetcher( + "updateDeprecation", + new UpdateDeprecationResolver(this.entityClient, this.entityService)) + .dataFetcher( + "batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) + .dataFetcher( + "unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) + .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "updateSecret", new UpdateSecretResolver(this.entityClient, this.secretService)) + .dataFetcher( + "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) + .dataFetcher( + "revokeAccessToken", + new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) + .dataFetcher( + "createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + 
"deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "createIngestionExecutionRequest", + new CreateIngestionExecutionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "cancelIngestionExecutionRequest", + new CancelIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher( + "createTestConnectionRequest", + new CreateTestConnectionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "upsertCustomAssertion", new UpsertCustomAssertionResolver(assertionService)) + .dataFetcher( + "reportAssertionResult", new ReportAssertionResultResolver(assertionService)) + .dataFetcher( + "deleteAssertion", + new DeleteAssertionResolver(this.entityClient, this.entityService)) + .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) + .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) + .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) + .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) + .dataFetcher( + "createGlossaryTerm", + new CreateGlossaryTermResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createGlossaryNode", + new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateParentNode", + new UpdateParentNodeResolver(this.entityService, this.entityClient)) + .dataFetcher( + "deleteGlossaryEntity", + new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) + .dataFetcher( + "addRelatedTerms", + new AddRelatedTermsResolver(this.entityService, this.entityClient)) + .dataFetcher( + "removeRelatedTerms", + new RemoveRelatedTermsResolver(this.entityService, this.entityClient)) + .dataFetcher( + "createNativeUserResetToken", + new CreateNativeUserResetTokenResolver(this.nativeUserService)) + .dataFetcher( + 
"batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService)) + .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) + .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) + .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) + .dataFetcher( + "createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) + .dataFetcher( + "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) + .dataFetcher("createPost", new CreatePostResolver(this.postService)) + .dataFetcher("deletePost", new DeletePostResolver(this.postService)) + .dataFetcher("updatePost", new UpdatePostResolver(this.postService)) + .dataFetcher( + "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) + .dataFetcher("createView", new CreateViewResolver(this.viewService)) + .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) + .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) + .dataFetcher( + "updateGlobalViewsSettings", + new UpdateGlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateCorpUserViewsSettings", + new UpdateCorpUserViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateLineage", + new UpdateLineageResolver(this.entityService, this.lineageService)) + .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) + .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) + .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) + .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) + .dataFetcher( + "createDataProduct", new CreateDataProductResolver(this.dataProductService)) + .dataFetcher( + "updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) + .dataFetcher( + "deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) + .dataFetcher( + 
"batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) + .dataFetcher( + "createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher("submitFormPrompt", new SubmitFormPromptResolver(this.formService)) + .dataFetcher("batchAssignForm", new BatchAssignFormResolver(this.formService)) + .dataFetcher( + "createDynamicFormAssignment", + new CreateDynamicFormAssignmentResolver(this.formService)) + .dataFetcher( + "verifyForm", new VerifyFormResolver(this.formService, this.groupService)) + .dataFetcher("batchRemoveForm", new BatchRemoveFormResolver(this.formService)) + .dataFetcher( + "upsertStructuredProperties", + new UpsertStructuredPropertiesResolver(this.entityClient)) + .dataFetcher( + "removeStructuredProperties", + new RemoveStructuredPropertiesResolver(this.entityClient)) + .dataFetcher( + "createStructuredProperty", + new CreateStructuredPropertyResolver(this.entityClient)) + .dataFetcher( + "updateStructuredProperty", + new UpdateStructuredPropertyResolver(this.entityClient)) + .dataFetcher("raiseIncident", new RaiseIncidentResolver(this.entityClient)) + .dataFetcher( + "updateIncidentStatus", + new UpdateIncidentStatusResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createForm", new CreateFormResolver(this.entityClient, this.formService)) + .dataFetcher("deleteForm", new DeleteFormResolver(this.entityClient)) + .dataFetcher("updateForm", new UpdateFormResolver(this.entityClient)) + .dataFetcher( + "updateDocPropagationSettings", + new UpdateDocPropagationSettingsResolver(this.settingsService)); + + if (featureFlags.isBusinessAttributeEntityEnabled()) { + typeWiring + .dataFetcher( + "createBusinessAttribute", + new CreateBusinessAttributeResolver( + this.entityClient, 
this.entityService, this.businessAttributeService)) + .dataFetcher( + "updateBusinessAttribute", + new UpdateBusinessAttributeResolver( + this.entityClient, this.businessAttributeService)) + .dataFetcher( + "deleteBusinessAttribute", + new DeleteBusinessAttributeResolver(this.entityClient)) + .dataFetcher( + "addBusinessAttribute", new AddBusinessAttributeResolver(this.entityService)) + .dataFetcher( + "removeBusinessAttribute", + new RemoveBusinessAttributeResolver(this.entityService)); + } + return typeWiring; + }); + } + + private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "SearchResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((SearchResult) env.getSource()).getEntity()))) + .type( + "MatchedField", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((MatchedField) env.getSource()).getEntity()))) + .type( + "SearchAcrossLineageResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity()))) + .type( + "AggregationMetadata", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((AggregationMetadata) env.getSource()).getEntity()))) + .type( + "RecommendationContent", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((RecommendationContent) env.getSource()).getEntity()))) + .type( + "BrowseResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, (env) -> ((BrowseResults) env.getSource()).getEntities()))) + .type( + "ParentDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new EntityTypeBatchResolver( + entityTypes, + (env) -> { + final ParentDomainsResult result = env.getSource(); + return result != null 
? result.getDomains() : null; + }))) + .type( + "EntityRelationshipLegacy", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity()))) + .type( + "EntityRelationship", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((EntityRelationship) env.getSource()).getEntity()))) + .type( + "BrowseResultGroupV2", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity()))) + .type( + "BrowsePathEntry", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))) + .type( + "LineageRelationship", + typeWiring -> + typeWiring + .dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((LineageRelationship) env.getSource()).getEntity())) + .dataFetcher( + "createdActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getCreatedActor() != null + ? relationship.getCreatedActor() + : null; + })) + .dataFetcher( + "updatedActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getUpdatedActor() != null + ? 
relationship.getUpdatedActor() + : null; + }))) + .type( + "ListDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new LoadableTypeBatchResolver<>( + domainType, + (env) -> + ((ListDomainsResult) env.getSource()) + .getDomains().stream() + .map(Domain::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryTermsResult", + typeWiring -> + typeWiring.dataFetcher( + "terms", + new LoadableTypeBatchResolver<>( + glossaryTermType, + (env) -> + ((GetRootGlossaryTermsResult) env.getSource()) + .getTerms().stream() + .map(GlossaryTerm::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryNodesResult", + typeWiring -> + typeWiring.dataFetcher( + "nodes", + new LoadableTypeBatchResolver<>( + glossaryNodeType, + (env) -> + ((GetRootGlossaryNodesResult) env.getSource()) + .getNodes().stream() + .map(GlossaryNode::getUrn) + .collect(Collectors.toList())))) + .type( + "AutoCompleteResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResults) env.getSource()).getEntities()))) + .type( + "AutoCompleteResultForEntity", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))) + .type( + "PolicyMatchCriterionValue", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity()))) + .type( + "ListTestsResult", + typeWiring -> + typeWiring.dataFetcher( + "tests", + new LoadableTypeBatchResolver<>( + testType, + (env) -> + ((ListTestsResult) env.getSource()) + .getTests().stream() + .map(Test::getUrn) + .collect(Collectors.toList())))) + .type( + "QuickFilter", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((QuickFilter) env.getSource()).getEntity()))) + .type( + "Owner", + 
typeWiring -> + typeWiring.dataFetcher( + "ownershipType", + new EntityTypeResolver( + entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))) + .type( + "StructuredPropertiesEntry", + typeWiring -> + typeWiring + .dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertiesEntry) env.getSource()) + .getStructuredProperty() + .getUrn())) + .dataFetcher( + "valueEntities", + new BatchGetEntitiesResolver( + entityTypes, + (env) -> + ((StructuredPropertiesEntry) env.getSource()).getValueEntities()))); + } - private DataFetcher getEntityResolver() { - return new EntityTypeResolver(entityTypes, - (env) -> { - try { - Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); - } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); - } - }); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dataset} type. + */ + private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Dataset", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dataset) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getContainer() != null + ? 
dataset.getContainer().getUrn() + : null; + })) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getDataPlatformInstance() != null + ? dataset.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "datasetProfiles", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "datasetProfile", + DatasetProfileMapper::map)) + .dataFetcher( + "operations", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "operation", + OperationMapper::map, + new SortCriterion() + .setField(OPERATION_EVENT_TIME_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))) + .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) + .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(true, true))) + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher( + "assertions", new EntityAssertionsResolver(entityClient, graphClient)) + .dataFetcher("testResults", new TestResultsResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("runs", new EntityRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "SchemaField", + typeWiring -> + typeWiring.dataFetcher( + "schemaFieldEntity", + new LoadableTypeResolver<>( + schemaFieldType, + (env) -> ((SchemaField) env.getSource()).getSchemaFieldEntity().getUrn()))) + 
.type( + "UserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn()))) + .type( + "ForeignKeyConstraint", + typeWiring -> + typeWiring.dataFetcher( + "foreignDataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> + ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn()))) + .type( + "SiblingProperties", + typeWiring -> + typeWiring.dataFetcher( + "siblings", + new EntityTypeBatchResolver( + new ArrayList<>(entityTypes), + (env) -> ((SiblingProperties) env.getSource()).getSiblings()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))) + .type( + "DatasetStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } - private DataFetcher getResolver(LoadableType loadableType) { - return getResolver(loadableType, this::getUrnField); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.VersionedDataset} type. 
+ */ + private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionedDataset", + typeWiring -> typeWiring.dataFetcher("relationships", new StaticDataFetcher(null))); + } - private DataFetcher getResolver(LoadableType loadableType, - Function keyProvider) { - return new LoadableTypeResolver<>(loadableType, keyProvider); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. + */ + private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "AccessToken", + typeWiring -> + typeWiring.dataFetcher( + "metadata", + new LoadableTypeResolver<>( + accessTokenMetadataType, + (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn()))); + builder.type( + "ListAccessTokenResult", + typeWiring -> + typeWiring.dataFetcher( + "tokens", + new LoadableTypeBatchResolver<>( + accessTokenMetadataType, + (env) -> + ((ListAccessTokenResult) env.getSource()) + .getTokens().stream() + .map(AccessTokenMetadata::getUrn) + .collect(Collectors.toList())))); + } + + private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTerm", + typeWiring -> + typeWiring + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryNode", + typeWiring -> + typeWiring + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new 
EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); + } + + private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "SchemaFieldEntity", + typeWiring -> + typeWiring.dataFetcher( + "parent", + new EntityTypeResolver( + entityTypes, (env) -> ((SchemaFieldEntity) env.getSource()).getParent()))); + } + + private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "EntityPath", + typeWiring -> + typeWiring.dataFetcher( + "path", + new BatchGetEntitiesResolver( + entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); + } + + private void configureResolvedAuditStampResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "ResolvedAuditStamp", + typeWiring -> + typeWiring.dataFetcher( + "actor", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((ResolvedAuditStamp) env.getSource()).getActor().getUrn()))); + } - private String getUrnField(DataFetchingEnvironment env) { - return env.getArgument(URN_FIELD_NAME); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpUser} type. 
+ */ + private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpUser", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "CorpUserInfo", + typeWiring -> + typeWiring.dataFetcher( + "manager", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn()))); + builder.type( + "CorpUserEditableProperties", + typeWiring -> + typeWiring.dataFetcher( + "platforms", + new LoadableTypeBatchResolver<>( + dataPlatformType, + (env) -> + ((CorpUserEditableProperties) env.getSource()) + .getPlatforms().stream() + .map(DataPlatform::getUrn) + .collect(Collectors.toList())))); + } - private void configureMutationResolvers(final RuntimeWiring.Builder builder) { - builder.type("Mutation", typeWiring -> typeWiring - .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) - .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) - .dataFetcher("createTag", new CreateTagResolver(this.entityClient, this.entityService)) - .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) - .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) - .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) - .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) - .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) - .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) - .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) - .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) - .dataFetcher("updateCorpUserProperties", new 
MutableTypeResolver<>(corpUserType)) - .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) - .dataFetcher("addTag", new AddTagResolver(entityService)) - .dataFetcher("addTags", new AddTagsResolver(entityService)) - .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) - .dataFetcher("removeTag", new RemoveTagResolver(entityService)) - .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) - .dataFetcher("addTerm", new AddTermResolver(entityService)) - .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) - .dataFetcher("addTerms", new AddTermsResolver(entityService)) - .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) - .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) - .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) - .dataFetcher("updateDescription", new UpdateDescriptionResolver(entityService, this.entityClient)) - .dataFetcher("addOwner", new AddOwnerResolver(entityService)) - .dataFetcher("addOwners", new AddOwnersResolver(entityService)) - .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) - .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) - .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) - .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) - .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) - .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) - .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) - .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) - .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) - .dataFetcher("removeGroup", new 
RemoveGroupResolver(this.entityClient)) - .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) - .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) - .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) - .dataFetcher("updateDeprecation", new UpdateDeprecationResolver(this.entityClient, this.entityService)) - .dataFetcher("batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) - .dataFetcher("unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) - .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) - .dataFetcher("createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) - .dataFetcher("revokeAccessToken", new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) - .dataFetcher("createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) - .dataFetcher("createIngestionExecutionRequest", new CreateIngestionExecutionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("cancelIngestionExecutionRequest", new CancelIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("createTestConnectionRequest", new CreateTestConnectionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("deleteAssertion", new DeleteAssertionResolver(this.entityClient, 
this.entityService)) - .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) - .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) - .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) - .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) - .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient, this.entityService)) - .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) - .dataFetcher("updateParentNode", new UpdateParentNodeResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteGlossaryEntity", - new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) - .dataFetcher("updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) - .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) - .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) - .dataFetcher("batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService)) - .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) - .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) - .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) - .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) - .dataFetcher("createPost", new CreatePostResolver(this.postService)) - .dataFetcher("deletePost", new DeletePostResolver(this.postService)) - .dataFetcher("batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) - .dataFetcher("createView", new CreateViewResolver(this.viewService)) - 
.dataFetcher("updateView", new UpdateViewResolver(this.viewService)) - .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) - .dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateLineage", new UpdateLineageResolver(this.entityService, this.lineageService)) - .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) - .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) - .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) - .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) - .dataFetcher("createDataProduct", new CreateDataProductResolver(this.dataProductService)) - .dataFetcher("updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) - .dataFetcher("deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) - .dataFetcher("batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) - .dataFetcher("createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService)) - ); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpGroup} type. 
+ */ + private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpGroup", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", + new EntityRelationshipsResultResolver(graphClient, entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder + .type( + "CorpGroupInfo", + typeWiring -> + typeWiring + .dataFetcher( + "admins", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getAdmins().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()))) + .dataFetcher( + "members", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getMembers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList())))) + .type( + "ListGroupsResult", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> + ((ListGroupsResult) env.getSource()) + .getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList())))); + } + + private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "Tag", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); + builder.type( + "TagAssociation", + typeWiring -> + typeWiring.dataFetcher( + "tag", + new LoadableTypeResolver<>( + tagType, + (env) -> + ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()) + .getTag() + .getUrn()))); + } + + private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTermAssociation", + typeWiring -> + typeWiring.dataFetcher( + 
"term", + new LoadableTypeResolver<>( + glossaryTermType, + (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn()))); + } - private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("SearchResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchResult) env.getSource()).getEntity())) - ) - .type("MatchedField", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((MatchedField) env.getSource()).getEntity())) - ) - .type("SearchAcrossLineageResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity())) - ) - .type("AggregationMetadata", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((AggregationMetadata) env.getSource()).getEntity())) - ) - .type("RecommendationContent", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((RecommendationContent) env.getSource()).getEntity())) - ) - .type("BrowseResults", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((BrowseResults) env.getSource()).getEntities())) - ) - .type("ParentDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new EntityTypeBatchResolver(entityTypes, - (env) -> { - final ParentDomainsResult result = env.getSource(); - return result != null ? 
result.getDomains() : null; - })) - ) - .type("EntityRelationshipLegacy", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity())) - ) - .type("EntityRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationship) env.getSource()).getEntity())) - ) - .type("BrowseResultGroupV2", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity())) - ) - .type("BrowsePathEntry", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowsePathEntry) env.getSource()).getEntity())) - ) - .type("LineageRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((LineageRelationship) env.getSource()).getEntity())) - .dataFetcher("createdActor", - new EntityTypeResolver(entityTypes, - (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getCreatedActor() != null ? relationship.getCreatedActor() : null; - }) - ) - .dataFetcher("updatedActor", - new EntityTypeResolver(entityTypes, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Notebook} type. 
+ */ + private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Notebook", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getUpdatedActor() != null ? relationship.getUpdatedActor() : null; - }) - ) - ) - .type("ListDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new LoadableTypeBatchResolver<>(domainType, - (env) -> ((ListDomainsResult) env.getSource()).getDomains().stream() - .map(Domain::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryTermsResult", typeWiring -> typeWiring - .dataFetcher("terms", new LoadableTypeBatchResolver<>(glossaryTermType, - (env) -> ((GetRootGlossaryTermsResult) env.getSource()).getTerms().stream() - .map(GlossaryTerm::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryNodesResult", typeWiring -> typeWiring - .dataFetcher("nodes", new LoadableTypeBatchResolver<>(glossaryNodeType, - (env) -> ((GetRootGlossaryNodesResult) env.getSource()).getNodes().stream() - .map(GlossaryNode::getUrn) - .collect(Collectors.toList()))) - ) - .type("AutoCompleteResults", typeWiring -> typeWiring - .dataFetcher("entities", - new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResults) env.getSource()).getEntities())) - ) - .type("AutoCompleteResultForEntity", typeWiring -> typeWiring - .dataFetcher("entities", new 
EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities())) - ) - .type("PolicyMatchCriterionValue", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity())) - ) - .type("ListTestsResult", typeWiring -> typeWiring - .dataFetcher("tests", new LoadableTypeBatchResolver<>(testType, - (env) -> ((ListTestsResult) env.getSource()).getTests().stream() - .map(Test::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuickFilter", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((QuickFilter) env.getSource()).getEntity())) - ) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("ownershipType", new EntityTypeResolver(entityTypes, - (env) -> ((Owner) env.getSource()).getOwnershipType())) - ); - } + final Notebook notebook = env.getSource(); + return notebook.getDataPlatformInstance() != null + ? notebook.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dataset} type. - */ - private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Dataset", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dashboard} type. 
+ */ + private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Dashboard", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dataset) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getContainer() != null ? dataset.getContainer().getUrn() : null; - }) - ) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Dashboard dashboard = env.getSource(); + return dashboard.getDataPlatformInstance() != null + ? dashboard.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getDataPlatformInstance() != null ? 
dataset.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("datasetProfiles", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "datasetProfile", - DatasetProfileMapper::map - ) - ) - .dataFetcher("operations", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "operation", - OperationMapper::map, - new SortCriterion().setField(OPERATION_EVENT_TIME_FIELD_NAME).setOrder(SortOrder.DESCENDING) - ) - ) - .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) - .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) - .dataFetcher("health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("assertions", new EntityAssertionsResolver(entityClient, graphClient)) - .dataFetcher("testResults", new TestResultsResolver(entityClient)) - .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("subTypes", new SubTypesResolver( - this.entityClient, - "dataset", - "subTypes")) - .dataFetcher("runs", new EntityRunsResolver(entityClient)) + final Dashboard dashboard = env.getSource(); + return dashboard.getContainer() != null + ? 
dashboard.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) + .dataFetcher( + "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes, - (env) -> ((Owner) env.getSource()).getOwner())) - ) - .type("UserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>(corpUserType, - (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn())) - ) - .type("ForeignKeyConstraint", typeWiring -> typeWiring - .dataFetcher("foreignDataset", new LoadableTypeResolver<>(datasetType, - (env) -> ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn())) - ) - .type("SiblingProperties", typeWiring -> typeWiring - .dataFetcher("siblings", - new EntityTypeBatchResolver( - new ArrayList<>(entityTypes), - (env) -> ((SiblingProperties) env.getSource()).getSiblings())) - ) - .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring - .dataFetcher("author", new LoadableTypeResolver<>(corpUserType, - (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())) - ) - .type("DatasetStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); + builder.type( + "DashboardInfo", + typeWiring -> + typeWiring.dataFetcher( + "charts", + new LoadableTypeBatchResolver<>( + chartType, + (env) -> + ((DashboardInfo) 
env.getSource()) + .getCharts().stream() + .map(Chart::getUrn) + .collect(Collectors.toList())))); + builder.type( + "DashboardUserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn()))); + builder.type( + "DashboardStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.VersionedDataset} type. - */ - private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("VersionedDataset", typeWiring -> typeWiring - .dataFetcher("relationships", new StaticDataFetcher(null))); - - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. 
- */ - private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { - builder.type("AccessToken", typeWiring -> typeWiring - .dataFetcher("metadata", new LoadableTypeResolver<>(accessTokenMetadataType, - (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn())) - ); - builder.type("ListAccessTokenResult", typeWiring -> typeWiring - .dataFetcher("tokens", new LoadableTypeBatchResolver<>(accessTokenMetadataType, - (env) -> ((ListAccessTokenResult) env.getSource()).getTokens().stream() - .map(AccessTokenMetadata::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryNode", typeWiring -> typeWiring - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { - builder.type("SchemaFieldEntity", typeWiring -> typeWiring - .dataFetcher("parent", new EntityTypeResolver(entityTypes, - (env) -> ((SchemaFieldEntity) env.getSource()).getParent())) - ); - } - - private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { - builder.type("EntityPath", typeWiring -> typeWiring - .dataFetcher("path", new BatchGetEntitiesResolver(entityTypes, - (env) -> ((EntityPath) env.getSource()).getPath())) - ); - } - - /** - * Configures resolvers responsible for resolving the 
{@link com.linkedin.datahub.graphql.generated.CorpUser} type. - */ - private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpUser", typeWiring -> typeWiring - .dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("CorpUserInfo", typeWiring -> typeWiring - .dataFetcher("manager", new LoadableTypeResolver<>(corpUserType, - (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpGroup} type. - */ - private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpGroup", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); - builder.type("CorpGroupInfo", typeWiring -> typeWiring - .dataFetcher("admins", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getAdmins().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - .dataFetcher("members", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getMembers().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - ) - .type("ListGroupsResult", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(corpGroupType, - (env) -> ((ListGroupsResult) env.getSource()).getGroups().stream() - .map(CorpGroup::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("Tag", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - builder.type("TagAssociation", typeWiring -> typeWiring - .dataFetcher("tag", - new LoadableTypeResolver<>(tagType, - (env) -> 
((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()).getTag().getUrn())) - ); - } - - private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTermAssociation", typeWiring -> typeWiring - .dataFetcher("term", - new LoadableTypeResolver<>(glossaryTermType, - (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn())) - ); - } + DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } + + private void configureStructuredPropertyResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "StructuredPropertyDefinition", + typeWiring -> + typeWiring + .dataFetcher( + "valueType", + new LoadableTypeResolver<>( + dataTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getValueType() + .getUrn())) + .dataFetcher( + "entityTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((StructuredPropertyDefinition) env.getSource()) + .getEntityTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + builder.type( + "TypeQualifier", + typeWiring -> + typeWiring.dataFetcher( + "allowedTypes", + new LoadableTypeBatchResolver<>( + entityTypeType, + (env) -> + ((TypeQualifier) env.getSource()) + .getAllowedTypes().stream() + .map(entityTypeType.getKeyProvider()) + .collect(Collectors.toList())))); + } /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Notebook} type. + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Chart} type. 
*/ - private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { - builder.type("Notebook", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Notebook notebook = env.getSource(); - return notebook.getDataPlatformInstance() != null ? notebook.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dashboard} type. - */ - private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { - builder.type("Dashboard", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getDataPlatformInstance() != null ? dashboard.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>(containerType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getContainer() != null ? 
dashboard.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) - .dataFetcher("statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("DashboardInfo", typeWiring -> typeWiring - .dataFetcher("charts", new LoadableTypeBatchResolver<>(chartType, - (env) -> ((DashboardInfo) env.getSource()).getCharts().stream() - .map(Chart::getUrn) - .collect(Collectors.toList()))) - ); - builder.type("DashboardUserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>( - corpUserType, - (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn())) - ); - builder.type("DashboardStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, - (env) -> { - DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Chart} type. 
- */ - private void configureChartResolvers(final RuntimeWiring.Builder builder) { - builder.type("Chart", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getDataPlatformInstance() != null ? chart.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>( - containerType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getContainer() != null ? chart.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("ChartInfo", typeWiring -> typeWiring - .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((ChartInfo) env.getSource()).getInputs().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - ); - } - - /** - * Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. 
- */ - private void configureTypeResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Entity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("EntityWithRelationships", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("BrowsableEntity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(browsableTypes.stream() - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("OwnerType", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(ownerTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("PlatformSchema", typeWiring -> typeWiring - .typeResolver(new PlatformSchemaUnionTypeResolver()) - ) - .type("HyperParameterValueType", typeWiring -> typeWiring - .typeResolver(new HyperParameterValueTypeResolver()) - ) - .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) - .type("TimeSeriesAspect", typeWiring -> typeWiring - .typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) - .type("ResultsType", typeWiring -> typeWiring - .typeResolver(new ResultsTypeResolver())); - } - - /** - * Configures custom type extensions leveraged within our GraphQL schema. - */ - private void configureTypeExtensions(final RuntimeWiring.Builder builder) { - builder.scalar(GraphQLLong); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataJob} type. 
- */ - private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataJob", typeWiring -> typeWiring + private void configureChartResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Chart", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("dataFlow", new LoadableTypeResolver<>(dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final DataJob dataJob = env.getSource(); - return dataJob.getDataPlatformInstance() != null ? dataJob.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + final Chart chart = env.getSource(); + return chart.getDataPlatformInstance() != null + ? chart.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final Chart chart = env.getSource(); + return chart.getContainer() != null + ? 
chart.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher( + "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - ) - .type("DataJobInputOutput", typeWiring -> typeWiring - .dataFetcher("inputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("outputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getOutputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("inputDatajobs", new LoadableTypeBatchResolver<>(dataJobType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatajobs().stream() - .map(DataJob::getUrn) - .collect(Collectors.toList()))) - ); - } + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); + builder.type( + "ChartInfo", + typeWiring -> + typeWiring.dataFetcher( + "inputs", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((ChartInfo) env.getSource()) + .getInputs().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList())))); + } + + /** Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. 
*/ + private void configureTypeResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Entity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "EntityWithRelationships", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "BrowsableEntity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + browsableTypes.stream() + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "OwnerType", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + ownerTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "PlatformSchema", + typeWiring -> typeWiring.typeResolver(new PlatformSchemaUnionTypeResolver())) + .type( + "HyperParameterValueType", + typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver())) + .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) + .type( + "TimeSeriesAspect", + typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) + .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())); + } + + /** Configures custom type extensions leveraged within our GraphQL schema. 
*/ + private void configureTypeExtensions(final RuntimeWiring.Builder builder) { + builder.scalar(GraphQLLong); + } + + /** Configures resolvers responsible for resolving the {@link ERModelRelationship} type. */ + private void configureERModelRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "ERModelRelationship", + typeWiring -> + typeWiring + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ERModelRelationshipProperties", + typeWiring -> + typeWiring + .dataFetcher( + "source", + new LoadableTypeResolver<>( + datasetType, + (env) -> { + final ERModelRelationshipProperties erModelRelationshipProperties = + env.getSource(); + return erModelRelationshipProperties.getSource() != null + ? erModelRelationshipProperties.getSource().getUrn() + : null; + })) + .dataFetcher( + "destination", + new LoadableTypeResolver<>( + datasetType, + (env) -> { + final ERModelRelationshipProperties erModelRelationshipProperties = + env.getSource(); + return erModelRelationshipProperties.getDestination() != null + ? erModelRelationshipProperties.getDestination().getUrn() + : null; + }))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))); + } + + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataJob} type. 
+ */ + private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataJob", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "dataFlow", + new LoadableTypeResolver<>( + dataFlowType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataFlow() != null + ? dataJob.getDataFlow().getUrn() + : null; + })) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataPlatformInstance() != null + ? dataJob.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))) + .type( + "DataJobInputOutput", + typeWiring -> + typeWiring + .dataFetcher( + "inputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "outputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getOutputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "inputDatajobs", + new 
LoadableTypeBatchResolver<>( + dataJobType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatajobs().stream() + .map(DataJob::getUrn) + .collect(Collectors.toList())))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataFlow} type. - */ - private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataFlow", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataFlow} type. + */ + private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataFlow", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final DataFlow dataFlow = env.getSource(); - return dataFlow.getDataPlatformInstance() != null ? 
dataFlow.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } + final DataFlow dataFlow = env.getSource(); + return dataFlow.getDataPlatformInstance() != null + ? dataFlow.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "health", + new EntityHealthResolver( + entityClient, + graphClient, + timeseriesAspectService, + new EntityHealthResolver.Config(false, true)))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.MLFeatureTable} type. - */ - private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { - builder - .type("MLFeatureTable", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.MLFeatureTable} type. 
+ */ + private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "MLFeatureTable", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeatureTable entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeatureTableProperties", + typeWiring -> + typeWiring + .dataFetcher( + "mlFeatures", + new LoadableTypeBatchResolver<>( + mlFeatureType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlFeatures().stream() + .map(MLFeature::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of())) + .dataFetcher( + "mlPrimaryKeys", + new LoadableTypeBatchResolver<>( + mlPrimaryKeyType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() + != null + ? 
((MLFeatureTableProperties) env.getSource()) + .getMlPrimaryKeys().stream() + .map(MLPrimaryKey::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of()))) + .type( + "MLFeatureProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, (env) -> { - final MLFeatureTable entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeatureTableProperties", typeWiring -> typeWiring - .dataFetcher("mlFeatures", - new LoadableTypeBatchResolver<>(mlFeatureType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null - ? ((MLFeatureTableProperties) env.getSource()).getMlFeatures().stream() - .map(MLFeature::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - .dataFetcher("mlPrimaryKeys", - new LoadableTypeBatchResolver<>(mlPrimaryKeyType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() != null - ? 
((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys().stream() - .map(MLPrimaryKey::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - ) - .type("MLFeatureProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLFeatureProperties) env.getSource()).getSources() == null) { + if (((MLFeatureProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLFeatureProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLPrimaryKeyProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { + } + return ((MLFeatureProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLPrimaryKeyProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> { + if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLPrimaryKeyProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLModel", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new 
LoadableTypeResolver<>(dataPlatformInstanceType, + } + return ((MLPrimaryKeyProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLModel", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModel mlModel = env.getSource(); + return mlModel.getDataPlatformInstance() != null + ? mlModel.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLModelProperties", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + mlModelGroupType, (env) -> { - final MLModel mlModel = env.getSource(); - return mlModel.getDataPlatformInstance() != null ? 
mlModel.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLModelProperties", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(mlModelGroupType, - (env) -> { - MLModelProperties properties = env.getSource(); - if (properties.getGroups() != null) { + MLModelProperties properties = env.getSource(); + if (properties.getGroups() != null) { return properties.getGroups().stream() .map(MLModelGroup::getUrn) .collect(Collectors.toList()); - } - return Collections.emptyList(); - }) - ) - ) - .type("MLModelGroup", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + } + return Collections.emptyList(); + }))) + .type( + "MLModelGroup", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "dataPlatformInstance", + new 
LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModelGroup entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeature", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeature entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLPrimaryKey", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, + restrictedService, + this.authorizationConfiguration)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLPrimaryKey entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? 
entity.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "GlossaryTerm", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "GlossaryNode", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureDomainResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Domain", + typeWiring -> + typeWiring + .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) + .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "DomainAssociation", + typeWiring -> + typeWiring.dataFetcher( + "domain", + new LoadableTypeResolver<>( + domainType, + (env) -> + ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()) + .getDomain() + .getUrn()))); + } + + private void configureFormResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "FormAssociation", + typeWiring -> + typeWiring.dataFetcher( + "form", + new LoadableTypeResolver<>( + formType, + (env) -> + ((com.linkedin.datahub.graphql.generated.FormAssociation) env.getSource()) + .getForm() + .getUrn()))); + builder.type( + "StructuredPropertyParams", + typeWiring -> + typeWiring.dataFetcher( + "structuredProperty", + new LoadableTypeResolver<>( + structuredPropertyType, + (env) -> + ((StructuredPropertyParams) env.getSource()) + .getStructuredProperty() + .getUrn()))); + builder.type( + "FormActorAssignment", + typeWiring -> + typeWiring + .dataFetcher( + "users", + new LoadableTypeBatchResolver<>( + 
corpUserType, (env) -> { - final MLModelGroup entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeature", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final FormActorAssignment actors = env.getSource(); + return actors.getUsers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, (env) -> { - final MLFeature entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLPrimaryKey", typeWiring -> typeWiring + final FormActorAssignment actors = env.getSource(); + return actors.getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList()); + })) + .dataFetcher("isAssignedToMe", new IsFormAssignedToMeResolver(groupService))); + } + + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProduct", + typeWiring -> + typeWiring + .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Assertion", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new 
EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final MLPrimaryKey entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))) - .type("GlossaryNode", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureDomainResolvers(final RuntimeWiring.Builder builder) { - builder.type("Domain", typeWiring -> typeWiring - .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) - .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("DomainAssociation", typeWiring -> typeWiring - .dataFetcher("domain", - new LoadableTypeResolver<>(domainType, - (env) -> ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()).getDomain().getUrn())) - ); - } - - private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProduct", typeWiring -> typeWiring - .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - } - - private void configureAssertionResolvers(final 
RuntimeWiring.Builder builder) { - builder.type("Assertion", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Assertion assertion = env.getSource(); + return assertion.getDataPlatformInstance() != null + ? assertion.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); + } + + private void configureContractResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Dataset", + typeWiring -> + typeWiring.dataFetcher( + "contract", new EntityDataContractResolver(this.entityClient, this.graphClient))); + builder.type( + "FreshnessContract", + typeWiring -> + typeWiring.dataFetcher( + "assertion", + new LoadableTypeResolver<>( + getAssertionType(), (env) -> { - final Assertion assertion = env.getSource(); - return assertion.getDataPlatformInstance() != null ? assertion.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); - } - - private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { - // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
- builder.type("ActorFilter", typeWiring -> typeWiring.dataFetcher("resolvedUsers", - new LoadableTypeBatchResolver<>(corpUserType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getUsers(); - })).dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getGroups(); - })).dataFetcher("resolvedRoles", new LoadableTypeBatchResolver<>(dataHubRoleType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getRoles(); - })).dataFetcher("resolvedOwnershipTypes", new LoadableTypeBatchResolver<>(ownershipType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getResourceOwnersTypes(); - }))); - } - - private void configureRoleResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataHubRole", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureViewResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataHubView", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListViewsResult", typeWiring -> typeWiring - .dataFetcher("views", new LoadableTypeBatchResolver<>( - dataHubViewType, - (env) -> ((ListViewsResult) env.getSource()).getViews().stream() - .map(DataHubView::getUrn) - .collect(Collectors.toList()))) - ) - .type("CorpUserViewsSettings", typeWiring -> typeWiring - .dataFetcher("defaultView", new LoadableTypeResolver<>( + final FreshnessContract contract = env.getSource(); + return contract.getAssertion() != null + ? contract.getAssertion().getUrn() + : null; + }))); + builder.type( + "DataQualityContract", + typeWiring -> + typeWiring.dataFetcher( + "assertion", + new LoadableTypeResolver<>( + getAssertionType(), + (env) -> { + final DataQualityContract contract = env.getSource(); + return contract.getAssertion() != null + ? 
contract.getAssertion().getUrn() + : null; + }))); + builder.type( + "SchemaContract", + typeWiring -> + typeWiring.dataFetcher( + "assertion", + new LoadableTypeResolver<>( + getAssertionType(), + (env) -> { + final SchemaContract contract = env.getSource(); + return contract.getAssertion() != null + ? contract.getAssertion().getUrn() + : null; + }))); + builder.type( + "Mutation", + typeWiring -> + typeWiring.dataFetcher( + "upsertDataContract", + new UpsertDataContractResolver(this.entityClient, this.graphClient))); + } + + private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { + // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the + // Policy type. + builder.type( + "ActorFilter", + typeWiring -> + typeWiring + .dataFetcher( + "resolvedUsers", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getUsers(); + })) + .dataFetcher( + "resolvedGroups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getGroups(); + })) + .dataFetcher( + "resolvedRoles", + new LoadableTypeBatchResolver<>( + dataHubRoleType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getRoles(); + })) + .dataFetcher( + "resolvedOwnershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getResourceOwnersTypes(); + }))); + } + + private void configureDataHubRoleResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataHubRole", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureViewResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataHubView", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + 
"ListViewsResult", + typeWiring -> + typeWiring.dataFetcher( + "views", + new LoadableTypeBatchResolver<>( + dataHubViewType, + (env) -> + ((ListViewsResult) env.getSource()) + .getViews().stream() + .map(DataHubView::getUrn) + .collect(Collectors.toList())))) + .type( + "CorpUserViewsSettings", + typeWiring -> + typeWiring.dataFetcher( + "defaultView", + new LoadableTypeResolver<>( dataHubViewType, (env) -> { - final CorpUserViewsSettings settings = env.getSource(); - if (settings.getDefaultView() != null) { - return settings.getDefaultView().getUrn(); - } - return null; - } - ) - )); - } - - private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("QueryEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListQueriesResult", typeWiring -> typeWiring - .dataFetcher("queries", new LoadableTypeBatchResolver<>( - queryType, - (env) -> ((ListQueriesResult) env.getSource()).getQueries().stream() - .map(QueryEntity::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuerySubject", typeWiring -> typeWiring - .dataFetcher("dataset", new LoadableTypeResolver<>( - datasetType, - (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn())) - ); - - } - - private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { - builder - .type("OwnershipTypeEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListOwnershipTypesResult", typeWiring -> typeWiring - .dataFetcher("ownershipTypes", new LoadableTypeBatchResolver<>(ownershipType, - (env) -> ((ListOwnershipTypesResult) env.getSource()).getOwnershipTypes().stream() - .map(OwnershipTypeEntity::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProcessInstance", - typeWiring -> 
typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("state", new TimeSeriesAspectResolver(this.entityClient, "dataProcessInstance", - DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, DataProcessInstanceRunEventMapper::map))); - } - - private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { - builder.type("TestResult", typeWiring -> typeWiring - .dataFetcher("test", new LoadableTypeResolver<>(testType, - (env) -> { - final TestResult testResult = env.getSource(); - return testResult.getTest() != null ? testResult.getTest().getUrn() : null; - })) - ); - } - - private DataLoader> createDataLoader(final LoadableType graphType, final QueryContext queryContext) { - BatchLoaderContextProvider contextProvider = () -> queryContext; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); - return DataLoader.newDataLoader((keys, context) -> CompletableFuture.supplyAsync(() -> { - try { - log.debug(String.format("Batch loading entities of type: %s, keys: %s", graphType.name(), keys)); - return graphType.batchLoad(keys, context.getContext()); - } catch (Exception e) { - log.error(String.format("Failed to load Entities of type: %s, keys: %s", graphType.name(), keys) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to retrieve entities of type %s", graphType.name()), e); - } - }), loaderOptions); - } - - private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { - builder.type("IngestionSource", typeWiring -> typeWiring - .dataFetcher("executions", new IngestionSourceExecutionRequestsResolver(entityClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> { - final IngestionSource ingestionSource = env.getSource(); - return ingestionSource.getPlatform() != null ? 
ingestionSource.getPlatform().getUrn() : null; - }) - )); + final CorpUserViewsSettings settings = env.getSource(); + if (settings.getDefaultView() != null) { + return settings.getDefaultView().getUrn(); + } + return null; + }))); + } + + private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "QueryEntity", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final QueryEntity query = env.getSource(); + return query.getPlatform() != null + ? query.getPlatform().getUrn() + : null; + }))) + .type( + "QueryProperties", + typeWiring -> + typeWiring.dataFetcher( + "origin", + new EntityTypeResolver( + entityTypes, (env) -> ((QueryProperties) env.getSource()).getOrigin()))) + .type( + "ListQueriesResult", + typeWiring -> + typeWiring.dataFetcher( + "queries", + new LoadableTypeBatchResolver<>( + queryType, + (env) -> + ((ListQueriesResult) env.getSource()) + .getQueries().stream() + .map(QueryEntity::getUrn) + .collect(Collectors.toList())))) + .type( + "QuerySubject", + typeWiring -> + typeWiring.dataFetcher( + "dataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn()))); + } + + private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { + builder + .type( + "OwnershipTypeEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListOwnershipTypesResult", + typeWiring -> + typeWiring.dataFetcher( + "ownershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> + ((ListOwnershipTypesResult) env.getSource()) + .getOwnershipTypes().stream() + .map(OwnershipTypeEntity::getUrn) + .collect(Collectors.toList())))); + } + + private void configureDataProcessInstanceResolvers(final 
RuntimeWiring.Builder builder) { + builder.type( + "DataProcessInstance", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher( + "state", + new TimeSeriesAspectResolver( + this.entityClient, + "dataProcessInstance", + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + DataProcessInstanceRunEventMapper::map))); + } + + private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "TestResult", + typeWiring -> + typeWiring.dataFetcher( + "test", + new LoadableTypeResolver<>( + testType, + (env) -> { + final TestResult testResult = env.getSource(); + return testResult.getTest() != null ? testResult.getTest().getUrn() : null; + }))); + } + + private DataLoader> createDataLoader( + final LoadableType graphType, final QueryContext queryContext) { + BatchLoaderContextProvider contextProvider = () -> queryContext; + DataLoaderOptions loaderOptions = + DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); + return DataLoader.newDataLoader( + (keys, context) -> + GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug( + String.format( + "Batch loading entities of type: %s, keys: %s", + graphType.name(), keys)); + return graphType.batchLoad(keys, context.getContext()); + } catch (Exception e) { + log.error( + String.format( + "Failed to load Entities of type: %s, keys: %s", + graphType.name(), keys) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to retrieve entities of type %s", graphType.name()), + e); + } + }, + graphType.getClass().getSimpleName(), + "batchLoad"), + loaderOptions); + } + + private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "IngestionSource", + typeWiring -> + typeWiring + .dataFetcher( + 
"executions", new IngestionSourceExecutionRequestsResolver(entityClient)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final IngestionSource ingestionSource = env.getSource(); + return ingestionSource.getPlatform() != null + ? ingestionSource.getPlatform().getUrn() + : null; + }))); + } + + private void configureIncidentResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Incident", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "IncidentSource", + typeWiring -> + typeWiring.dataFetcher( + "source", + new LoadableTypeResolver<>( + this.assertionType, + (env) -> { + final IncidentSource incidentSource = env.getSource(); + return incidentSource.getSource() != null + ? incidentSource.getSource().getUrn() + : null; + }))); + + // Add incidents attribute to all entities that support it + final List entitiesWithIncidents = + ImmutableList.of("Dataset", "DataJob", "DataFlow", "Dashboard", "Chart"); + for (String entity : entitiesWithIncidents) { + builder.type( + entity, + typeWiring -> + typeWiring.dataFetcher("incidents", new EntityIncidentsResolver(entityClient))); } + } + + private void configureRestrictedResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Restricted", + typeWiring -> + typeWiring + .dataFetcher( + "lineage", + new EntityLineageResultResolver( + siblingGraphService, restrictedService, this.authorizationConfiguration)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureRoleResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Role", + typeWiring -> typeWiring.dataFetcher("isAssignedToMe", new IsAssignedToMeResolver())); + } + + private void configureBusinessAttributeResolver(final RuntimeWiring.Builder builder) { + builder + .type( + "BusinessAttribute", + typeWiring -> + typeWiring + .dataFetcher("exists", new 
EntityExistsResolver(entityService)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))) + .type( + "ListBusinessAttributesResult", + typeWiring -> + typeWiring.dataFetcher( + "businessAttributes", + new LoadableTypeBatchResolver<>( + businessAttributeType, + (env) -> + ((ListBusinessAttributesResult) env.getSource()) + .getBusinessAttributes().stream() + .map(BusinessAttribute::getUrn) + .collect(Collectors.toList())))); + } + + private void configureBusinessAttributeAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "BusinessAttributeAssociation", + typeWiring -> + typeWiring.dataFetcher( + "businessAttribute", + new LoadableTypeResolver<>( + businessAttributeType, + (env) -> + ((BusinessAttributeAssociation) env.getSource()) + .getBusinessAttribute() + .getUrn()))); + } + + private void configureConnectionResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Mutation", + typeWiring -> + typeWiring.dataFetcher( + "upsertConnection", + new UpsertConnectionResolver(connectionService, secretService))); + builder.type( + "Query", + typeWiring -> typeWiring.dataFetcher("connection", getResolver(this.connectionType))); + builder.type( + "DataHubConnection", + typeWiring -> + typeWiring.dataFetcher( + "platform", + new LoadableTypeResolver<>( + this.dataPlatformType, + (env) -> { + final DataHubConnection connection = env.getSource(); + return connection.getPlatform() != null + ? 
connection.getPlatform().getUrn() + : null; + }))); + } + + private void configureDeprecationResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Deprecation", + typeWiring -> + typeWiring.dataFetcher( + "actorEntity", + new EntityTypeResolver( + entityTypes, (env) -> ((Deprecation) env.getSource()).getActorEntity()))); + } + + private void configureMetadataAttributionResolver(final RuntimeWiring.Builder builder) { + builder.type( + "MetadataAttribution", + typeWiring -> + typeWiring + .dataFetcher( + "actor", + new EntityTypeResolver( + entityTypes, (env) -> ((MetadataAttribution) env.getSource()).getActor())) + .dataFetcher( + "source", + new EntityTypeResolver( + entityTypes, + (env) -> ((MetadataAttribution) env.getSource()).getSource()))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 157fb10ce70785..f6ab3a603dbb7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -12,19 +12,24 @@ import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.client.SystemEntityClient; +import com.linkedin.metadata.client.UsageStatsJavaClient; import com.linkedin.metadata.config.DataHubConfiguration; import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.config.TestsConfiguration; import com.linkedin.metadata.config.ViewsConfiguration; import com.linkedin.metadata.config.VisualConfiguration; import com.linkedin.metadata.config.telemetry.TelemetryConfiguration; +import com.linkedin.metadata.connection.ConnectionService; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; import 
com.linkedin.metadata.graph.SiblingGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; +import com.linkedin.metadata.service.AssertionService; +import com.linkedin.metadata.service.BusinessAttributeService; import com.linkedin.metadata.service.DataProductService; +import com.linkedin.metadata.service.ERModelRelationshipService; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; @@ -33,46 +38,56 @@ import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.version.GitVersion; -import com.linkedin.usage.UsageClient; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import lombok.Data; @Data public class GmsGraphQLEngineArgs { - EntityClient entityClient; - SystemEntityClient systemEntityClient; - GraphClient graphClient; - UsageClient usageClient; - AnalyticsService analyticsService; - EntityService entityService; - RecommendationsService recommendationsService; - StatefulTokenService statefulTokenService; - TimeseriesAspectService timeseriesAspectService; - EntityRegistry entityRegistry; - SecretService secretService; - NativeUserService nativeUserService; - IngestionConfiguration ingestionConfiguration; - AuthenticationConfiguration authenticationConfiguration; - AuthorizationConfiguration authorizationConfiguration; - GitVersion gitVersion; - TimelineService timelineService; - boolean supportsImpactAnalysis; - VisualConfiguration visualConfiguration; - TelemetryConfiguration telemetryConfiguration; - TestsConfiguration testsConfiguration; - DataHubConfiguration datahubConfiguration; - ViewsConfiguration 
viewsConfiguration; - SiblingGraphService siblingGraphService; - GroupService groupService; - RoleService roleService; - InviteTokenService inviteTokenService; - PostService postService; - ViewService viewService; - OwnershipTypeService ownershipTypeService; - SettingsService settingsService; - LineageService lineageService; - QueryService queryService; - FeatureFlags featureFlags; - DataProductService dataProductService; + EntityClient entityClient; + SystemEntityClient systemEntityClient; + GraphClient graphClient; + UsageStatsJavaClient usageClient; + AnalyticsService analyticsService; + EntityService entityService; + RecommendationsService recommendationsService; + StatefulTokenService statefulTokenService; + TimeseriesAspectService timeseriesAspectService; + EntityRegistry entityRegistry; + SecretService secretService; + NativeUserService nativeUserService; + IngestionConfiguration ingestionConfiguration; + AuthenticationConfiguration authenticationConfiguration; + AuthorizationConfiguration authorizationConfiguration; + GitVersion gitVersion; + TimelineService timelineService; + boolean supportsImpactAnalysis; + VisualConfiguration visualConfiguration; + TelemetryConfiguration telemetryConfiguration; + TestsConfiguration testsConfiguration; + DataHubConfiguration datahubConfiguration; + ViewsConfiguration viewsConfiguration; + SiblingGraphService siblingGraphService; + GroupService groupService; + RoleService roleService; + InviteTokenService inviteTokenService; + PostService postService; + ViewService viewService; + OwnershipTypeService ownershipTypeService; + SettingsService settingsService; + LineageService lineageService; + QueryService queryService; + FeatureFlags featureFlags; + DataProductService dataProductService; + ERModelRelationshipService erModelRelationshipService; + FormService formService; + RestrictedService restrictedService; + int graphQLQueryComplexityLimit; + int graphQLQueryDepthLimit; + boolean graphQLQueryIntrospectionEnabled; + 
BusinessAttributeService businessAttributeService; + ConnectionService connectionService; + AssertionService assertionService; - //any fork specific args should go below this line + // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java index e7ef0c402a1de5..a544bd46527c46 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java @@ -1,45 +1,50 @@ package com.linkedin.datahub.graphql; +import com.linkedin.datahub.graphql.types.EntityType; import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.idl.RuntimeWiring; import java.util.Collection; import java.util.List; - /** - * An interface that allows the Core GMS GraphQL Engine to be extended without requiring - * code changes in the GmsGraphQLEngine class if new entities, relationships or resolvers - * need to be introduced. This is useful if you are maintaining a fork of DataHub and - * don't want to deal with merge conflicts. + * An interface that allows the Core GMS GraphQL Engine to be extended without requiring code + * changes in the GmsGraphQLEngine class if new entities, relationships or resolvers need to be + * introduced. This is useful if you are maintaining a fork of DataHub and don't want to deal with + * merge conflicts. 
*/ public interface GmsGraphQLPlugin { /** * Initialization method that allows the plugin to instantiate + * * @param args */ void init(GmsGraphQLEngineArgs args); /** - * Return a list of schema files that contain graphql definitions - * that are served by this plugin + * Return a list of schema files that contain graphql definitions that are served by this plugin + * * @return */ List getSchemaFiles(); /** * Return a list of LoadableTypes that this plugin serves + * * @return */ Collection> getLoadableTypes(); + /** Return a list of Entity Types that the plugin services */ + Collection> getEntityTypes(); + /** - * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers. + * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific + * resolvers. + * * @param wiringBuilder : the builder being used to configure the runtime wiring * @param baseEngine : a reference to the core engine and its graphql types */ - default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) { - - } - + default void configureExtraResolvers( + final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java index 74c4c541b972b1..dd8eabd3ce06fd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java @@ -1,15 +1,23 @@ package com.linkedin.datahub.graphql; +import static graphql.schema.idl.RuntimeWiring.*; + import com.linkedin.datahub.graphql.exception.DataHubDataFetcherExceptionHandler; +import com.linkedin.datahub.graphql.instrumentation.DataHubFieldComplexityCalculator; import graphql.ExecutionInput; import 
graphql.ExecutionResult; import graphql.GraphQL; +import graphql.analysis.MaxQueryComplexityInstrumentation; +import graphql.analysis.MaxQueryDepthInstrumentation; +import graphql.execution.instrumentation.ChainedInstrumentation; +import graphql.execution.instrumentation.Instrumentation; import graphql.execution.instrumentation.tracing.TracingInstrumentation; import graphql.schema.GraphQLSchema; import graphql.schema.idl.RuntimeWiring; import graphql.schema.idl.SchemaGenerator; import graphql.schema.idl.SchemaParser; import graphql.schema.idl.TypeDefinitionRegistry; +import graphql.schema.visibility.NoIntrospectionGraphqlFieldVisibility; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -22,152 +30,202 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; -import static graphql.schema.idl.RuntimeWiring.*; - /** - * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and executing - * GQL queries. - - *

This class provides a {@link Builder} builder for constructing {@link GraphQL} instances provided one or more - * schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. + * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and + * executing GQL queries. * - *

In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set of variables. + *

This class provides a {@link Builder} builder for constructing {@link GraphQL} instances + * provided one or more schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. + * + *

In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set + * of variables. */ public class GraphQLEngine { - private final GraphQL _graphQL; - private final Map>> _dataLoaderSuppliers; - - private GraphQLEngine(@Nonnull final List schemas, - @Nonnull final RuntimeWiring runtimeWiring, - @Nonnull final Map>> dataLoaderSuppliers) { - - _dataLoaderSuppliers = dataLoaderSuppliers; - - /* - * Parse schema - */ - SchemaParser schemaParser = new SchemaParser(); - TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); - schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); + private final GraphQL _graphQL; + private final Map>> _dataLoaderSuppliers; + private final int graphQLQueryComplexityLimit; + private final int graphQLQueryDepthLimit; + private final boolean graphQLQueryIntrospectionEnabled; + + private GraphQLEngine( + @Nonnull final List schemas, + @Nonnull final RuntimeWiring runtimeWiring, + @Nonnull final Map>> dataLoaderSuppliers, + @Nonnull final int graphQLQueryComplexityLimit, + @Nonnull final int graphQLQueryDepthLimit, + @Nonnull final boolean graphQLQueryIntrospectionEnabled) { + this.graphQLQueryComplexityLimit = graphQLQueryComplexityLimit; + this.graphQLQueryDepthLimit = graphQLQueryDepthLimit; + this.graphQLQueryIntrospectionEnabled = graphQLQueryIntrospectionEnabled; + + _dataLoaderSuppliers = dataLoaderSuppliers; + + /* + * Parse schema + */ + SchemaParser schemaParser = new SchemaParser(); + TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); + schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); - /* - * Configure resolvers (data fetchers) - */ - SchemaGenerator schemaGenerator = new SchemaGenerator(); - GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + /* + * Configure resolvers (data fetchers) + */ + SchemaGenerator schemaGenerator = new 
SchemaGenerator(); + GraphQLSchema graphQLSchema = + schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); - /* - * Instantiate engine - */ - _graphQL = new GraphQL.Builder(graphQLSchema) + /* + * Instantiate engine + */ + List instrumentations = new ArrayList<>(3); + instrumentations.add(new TracingInstrumentation()); + instrumentations.add(new MaxQueryDepthInstrumentation(graphQLQueryDepthLimit)); + instrumentations.add( + new MaxQueryComplexityInstrumentation( + graphQLQueryComplexityLimit, new DataHubFieldComplexityCalculator())); + ChainedInstrumentation chainedInstrumentation = new ChainedInstrumentation(instrumentations); + _graphQL = + new GraphQL.Builder(graphQLSchema) .defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler()) - .instrumentation(new TracingInstrumentation()) + .instrumentation(chainedInstrumentation) .build(); - } + } + + public ExecutionResult execute( + @Nonnull final String query, + @Nullable final String operationName, + @Nullable final Map variables, + @Nonnull final QueryContext context) { + /* + * Init DataLoaderRegistry - should be created for each request. + */ + DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - public ExecutionResult execute(@Nonnull final String query, - @Nullable final Map variables, - @Nonnull final QueryContext context) { - /* - * Init DataLoaderRegistry - should be created for each request. 
- */ - DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - - /* - * Construct execution input - */ - ExecutionInput executionInput = ExecutionInput.newExecutionInput() + /* + * Construct execution input + */ + ExecutionInput executionInput = + ExecutionInput.newExecutionInput() .query(query) + .operationName(operationName) .variables(variables) .dataLoaderRegistry(register) .context(context) .build(); - /* - * Execute GraphQL Query - */ - return _graphQL.execute(executionInput); + /* + * Execute GraphQL Query + */ + return _graphQL.execute(executionInput); + } + + public GraphQL getGraphQL() { + return _graphQL; + } + + public static Builder builder() { + return new Builder(); + } + + /** Used to construct a {@link GraphQLEngine}. */ + public static class Builder { + + private final List _schemas = new ArrayList<>(); + private final Map>> _loaderSuppliers = + new HashMap<>(); + private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); + private int graphQLQueryComplexityLimit = 2000; + private int graphQLQueryDepthLimit = 50; + private boolean graphQLQueryIntrospectionEnabled = true; + + /** + * Used to add a schema file containing the GQL types resolved by the engine. + * + *

If multiple files are provided, their schemas will be merged together. + */ + public Builder addSchema(final String schema) { + _schemas.add(schema); + return this; } - public GraphQL getGraphQL() { - return _graphQL; + /** + * Used to register a {@link DataLoader} to be used within the configured resolvers. + * + *

The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} + * when invoked. + * + *

If multiple loaders are registered with the name, the latter will override the former. + */ + public Builder addDataLoader( + final String name, final Function> dataLoaderSupplier) { + _loaderSuppliers.put(name, dataLoaderSupplier); + return this; } - public static Builder builder() { - return new Builder(); + /** + * Used to register multiple {@link DataLoader}s for use within the configured resolvers. + * + *

The included {@link Supplier} provided is expected to return a new instance of {@link + * DataLoader} when invoked. + * + *

If multiple loaders are registered with the name, the latter will override the former. + */ + public Builder addDataLoaders( + Map>> dataLoaderSuppliers) { + _loaderSuppliers.putAll(dataLoaderSuppliers); + return this; } /** - * Used to construct a {@link GraphQLEngine}. + * Used to configure the runtime wiring (data fetchers & type resolvers) used in resolving the + * Graph QL schema. + * + *

The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register + * any required data + type resolvers. */ - public static class Builder { - - private final List _schemas = new ArrayList<>(); - private final Map>> _loaderSuppliers = new HashMap<>(); - private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); - - /** - * Used to add a schema file containing the GQL types resolved by the engine. - * - * If multiple files are provided, their schemas will be merged together. - */ - public Builder addSchema(final String schema) { - _schemas.add(schema); - return this; - } - - /** - * Used to register a {@link DataLoader} to be used within the configured resolvers. - * - * The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoader(final String name, final Function> dataLoaderSupplier) { - _loaderSuppliers.put(name, dataLoaderSupplier); - return this; - } - - /** - * Used to register multiple {@link DataLoader}s for use within the configured resolvers. - * - * The included {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoaders(Map>> dataLoaderSuppliers) { - _loaderSuppliers.putAll(dataLoaderSuppliers); - return this; - } - - /** - * Used to configure the runtime wiring (data fetchers & type resolvers) - * used in resolving the Graph QL schema. - * - * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register any required - * data + type resolvers. - */ - public Builder configureRuntimeWiring(final Consumer builderFunc) { - builderFunc.accept(_runtimeWiringBuilder); - return this; - } - - /** - * Builds a {@link GraphQLEngine}. 
- */ - public GraphQLEngine build() { - return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); - } + public Builder configureRuntimeWiring(final Consumer builderFunc) { + if (!this.graphQLQueryIntrospectionEnabled) + _runtimeWiringBuilder.fieldVisibility( + NoIntrospectionGraphqlFieldVisibility.NO_INTROSPECTION_FIELD_VISIBILITY); + builderFunc.accept(_runtimeWiringBuilder); + return this; + } + + public Builder setGraphQLQueryComplexityLimit(final int queryComplexityLimit) { + this.graphQLQueryComplexityLimit = queryComplexityLimit; + return this; } - private DataLoaderRegistry createDataLoaderRegistry(final Map>> dataLoaderSuppliers, - final QueryContext context) { - final DataLoaderRegistry registry = new DataLoaderRegistry(); - for (String key : dataLoaderSuppliers.keySet()) { - registry.register(key, dataLoaderSuppliers.get(key).apply(context)); - } - return registry; + public Builder setGraphQLQueryDepthLimit(final int queryDepthLimit) { + this.graphQLQueryDepthLimit = queryDepthLimit; + return this; } + public Builder setGraphQLQueryIntrospectionEnabled(final boolean introspectionEnabled) { + this.graphQLQueryIntrospectionEnabled = introspectionEnabled; + return this; + } + + /** Builds a {@link GraphQLEngine}. 
*/ + public GraphQLEngine build() { + return new GraphQLEngine( + _schemas, + _runtimeWiringBuilder.build(), + _loaderSuppliers, + graphQLQueryComplexityLimit, + graphQLQueryDepthLimit, + graphQLQueryIntrospectionEnabled); + } + } + + private DataLoaderRegistry createDataLoaderRegistry( + final Map>> dataLoaderSuppliers, + final QueryContext context) { + final DataLoaderRegistry registry = new DataLoaderRegistry(); + for (String key : dataLoaderSuppliers.keySet()) { + registry.register(key, dataLoaderSuppliers.get(key).apply(context)); + } + return registry; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java index 4803ef08fdddcf..7dffd90cf2d7cc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java @@ -3,39 +3,32 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; +import io.datahubproject.metadata.context.OperationContext; - -/** - * Provided as input to GraphQL resolvers; used to carry information about GQL request context. - */ +/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */ public interface QueryContext { - /** - * Returns true if the current actor is authenticated, false otherwise. - */ - boolean isAuthenticated(); - - /** - * Returns the {@link Authentication} associated with the current query context. - */ - Authentication getAuthentication(); - - /** - * Returns the current authenticated actor, null if there is none. - */ - default Actor getActor() { - return getAuthentication().getActor(); - } - - /** - * Returns the current authenticated actor, null if there is none. 
- */ - default String getActorUrn() { - return getActor().toUrnStr(); - } - - /** - * Returns the authorizer used to authorize specific actions. - */ - Authorizer getAuthorizer(); + /** Returns true if the current actor is authenticated, false otherwise. */ + boolean isAuthenticated(); + + /** Returns the {@link Authentication} associated with the current query context. */ + Authentication getAuthentication(); + + /** Returns the current authenticated actor, null if there is none. */ + default Actor getActor() { + return getAuthentication().getActor(); + } + + /** Returns the current authenticated actor, null if there is none. */ + default String getActorUrn() { + return getActor().toUrnStr(); + } + + /** Returns the authorizer used to authorize specific actions. */ + Authorizer getAuthorizer(); + + /** + * @return Returns the operational context + */ + OperationContext getOperationContext(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java index df7f0884852d47..425c86ab0f0f65 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class RelationshipKey { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java index c74d84d8be3230..b0422ed4bde6a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/SubTypesResolver.java @@ -2,6 +2,7 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -15,33 +16,43 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @AllArgsConstructor public class SubTypesResolver implements DataFetcher> { - EntityClient _entityClient; - String _entityType; - String _aspectName; + EntityClient _entityClient; + String _entityType; + String _aspectName; - @Override - @Nullable - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - SubTypes subType = null; - final String urnStr = ((Entity) environment.getSource()).getUrn(); - try { - final Urn urn = Urn.createFromString(urnStr); - EntityResponse entityResponse = _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(_aspectName), context.getAuthentication()).get(urn); - if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { - subType = new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); - } - } catch (RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); + @Override + @Nullable + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + SubTypes subType = null; + final String urnStr = ((Entity) environment.getSource()).getUrn(); + try { + final Urn urn = Urn.createFromString(urnStr); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + context.getOperationContext(), + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(_aspectName)) + 
.get(urn); + if (entityResponse != null && entityResponse.getAspects().containsKey(_aspectName)) { + subType = + new SubTypes(entityResponse.getAspects().get(_aspectName).getValue().data()); } - return subType; - }); - } + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + _aspectName + " for urn " + urnStr + " ", e); + } + return subType; + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java index d51de6652bb0ac..c3ad37ddcb2018 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java @@ -10,11 +10,7 @@ public class TimeSeriesAspectArgs { private Long count; private TimeRange timeRange; - public TimeSeriesAspectArgs( - String urn, - String aspectName, - Long count, - TimeRange timeRange) { + public TimeSeriesAspectArgs(String urn, String aspectName, Long count, TimeRange timeRange) { this.urn = urn; this.aspectName = aspectName; this.count = count; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java index 5f703f520bde46..c7302c9772c5ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java @@ -3,7 +3,6 @@ import com.linkedin.usage.UsageTimeRange; import lombok.Data; - @Data public class UsageStatsKey { private String resource; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java index b0c0436ffd891a..6f81de5f04d8fc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java @@ -8,7 +8,7 @@ public class VersionedAspectKey { private String urn; private Long version; - public VersionedAspectKey(String urn, String aspectName, Long version) { + public VersionedAspectKey(String urn, String aspectName, Long version) { this.urn = urn; this.version = version; this.aspectName = aspectName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index a78d89e59bc7bc..b6599c38e6f425 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -1,14 +1,17 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.codec.JacksonDataCodec; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AspectParams; import com.linkedin.datahub.graphql.generated.AspectRenderSpec; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.RawAspect; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.models.AspectSpec; @@ -26,68 +29,91 @@ 
import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @AllArgsConstructor public class WeaklyTypedAspectsResolver implements DataFetcher>> { - private final EntityClient _entityClient; - private final EntityRegistry _entityRegistry; - private static final JacksonDataCodec CODEC = new JacksonDataCodec(); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + private static final JacksonDataCodec CODEC = new JacksonDataCodec(); - private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { - return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); - } + private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { + return (params.getAutoRenderOnly() == null + || !params.getAutoRenderOnly() + || aspectSpec.isAutoRender()) + && (params.getAspectNames() == null + || params.getAspectNames().isEmpty() + || params.getAspectNames().contains(aspectSpec.getName())); + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - List results = new ArrayList<>(); + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) + throws Exception { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + List results = new ArrayList<>(); - final QueryContext context = environment.getContext(); - final String urnStr = ((Entity) environment.getSource()).getUrn(); - final EntityType entityType = ((Entity) environment.getSource()).getType(); - final String entityTypeName = EntityTypeMapper.getName(entityType); - final AspectParams input = bindArgument(environment.getArgument("input"), AspectParams.class); + final QueryContext context = environment.getContext(); + final String urnStr = ((Entity) environment.getSource()).getUrn(); + final EntityType entityType = ((Entity) 
environment.getSource()).getType(); + final String entityTypeName = EntityTypeMapper.getName(entityType); + final AspectParams input = + bindArgument(environment.getArgument("input"), AspectParams.class); - EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); - entitySpec.getAspectSpecs().stream().filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)).forEach(aspectSpec -> { - try { - Urn urn = Urn.createFromString(urnStr); - RawAspect result = new RawAspect(); - EntityResponse entityResponse = - _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(aspectSpec.getName()), context.getAuthentication()).get(urn); - if (entityResponse == null || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { + EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); + entitySpec.getAspectSpecs().stream() + .filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)) + .forEach( + aspectSpec -> { + try { + Urn urn = Urn.createFromString(urnStr); + RawAspect result = new RawAspect(); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + context.getOperationContext(), + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(aspectSpec.getName())) + .get(urn); + if (entityResponse == null + || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { return; - } + } - DataMap resolvedAspect = entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); - if (resolvedAspect == null) { + DataMap resolvedAspect = + entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); + if (resolvedAspect == null) { return; - } + } - result.setPayload(CODEC.mapToString(resolvedAspect)); - result.setAspectName(aspectSpec.getName()); + result.setPayload(CODEC.mapToString(resolvedAspect)); + result.setAspectName(aspectSpec.getName()); - DataMap renderSpec = aspectSpec.getRenderSpec(); + DataMap renderSpec = aspectSpec.getRenderSpec(); - if 
(renderSpec != null) { + if (renderSpec != null) { AspectRenderSpec resultRenderSpec = new AspectRenderSpec(); resultRenderSpec.setDisplayType(renderSpec.getString("displayType")); resultRenderSpec.setDisplayName(renderSpec.getString("displayName")); resultRenderSpec.setKey(renderSpec.getString("key")); result.setRenderSpec(resultRenderSpec); - } + } - results.add(result); - } catch (IOException | RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + aspectSpec.getName() + " for urn " + urnStr + " ", e); - } - }); - return results; - }); - } + results.add(result); + } catch (IOException | RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + + aspectSpec.getName() + + " for urn " + + urnStr + + " ", + e); + } + }); + return results; + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java index 7728dcae5d8eef..3bf932c4281e8d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java @@ -7,18 +7,17 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - public class AnalyticsChartTypeResolver implements TypeResolver { - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TimeSeriesChart) { - return env.getSchema().getObjectType("TimeSeriesChart"); - } else if (env.getObject() instanceof BarChart) { - return env.getSchema().getObjectType("BarChart"); - } else if (env.getObject() instanceof TableChart) { - return 
env.getSchema().getObjectType("TableChart"); - } else { - throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TimeSeriesChart) { + return env.getSchema().getObjectType("TimeSeriesChart"); + } else if (env.getObject() instanceof BarChart) { + return env.getSchema().getObjectType("BarChart"); + } else if (env.getObject() instanceof TableChart) { + return env.getSchema().getObjectType("TableChart"); + } else { + throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index b8a5dd1121a109..4847aea224ccd6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -1,41 +1,56 @@ package com.linkedin.datahub.graphql.analytics.resolver; -import com.datahub.authentication.Authentication; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_USER_STATUS_LAST_MODIFIED_FIELD_NAME; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.analytics.service.AnalyticsService; import com.linkedin.datahub.graphql.analytics.service.AnalyticsUtil; import com.linkedin.datahub.graphql.generated.AnalyticsChart; import com.linkedin.datahub.graphql.generated.AnalyticsChartGroup; import com.linkedin.datahub.graphql.generated.BarChart; +import 
com.linkedin.datahub.graphql.generated.Cell; import com.linkedin.datahub.graphql.generated.DateInterval; import com.linkedin.datahub.graphql.generated.DateRange; +import com.linkedin.datahub.graphql.generated.EntityProfileParams; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.LinkParams; import com.linkedin.datahub.graphql.generated.NamedBar; import com.linkedin.datahub.graphql.generated.NamedLine; import com.linkedin.datahub.graphql.generated.Row; import com.linkedin.datahub.graphql.generated.TableChart; import com.linkedin.datahub.graphql.generated.TimeSeriesChart; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.util.DateUtil; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Optional; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub 
application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. */ @Slf4j @RequiredArgsConstructor public final class GetChartsResolver implements DataFetcher> { @@ -44,18 +59,20 @@ public final class GetChartsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - Authentication authentication = ResolverUtils.getAuthentication(environment); + public List get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); try { - return ImmutableList.of(AnalyticsChartGroup.builder() - .setGroupId("DataHubUsageAnalytics") - .setTitle("DataHub Usage Analytics") - .setCharts(getProductAnalyticsCharts(authentication)) - .build(), AnalyticsChartGroup.builder() - .setGroupId("GlobalMetadataAnalytics") - .setTitle("Data Landscape Summary") - .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) - .build()); + return ImmutableList.of( + AnalyticsChartGroup.builder() + .setGroupId("DataHubUsageAnalytics") + .setTitle("Usage Analytics") + .setCharts(getProductAnalyticsCharts(context.getOperationContext())) + .build(), + AnalyticsChartGroup.builder() + .setGroupId("GlobalMetadataAnalytics") + .setTitle("Data Landscape Summary") + .setCharts(getGlobalMetadataAnalyticsCharts(context.getOperationContext())) + .build()); } catch (Exception e) { log.error("Failed to retrieve analytics charts!", e); return Collections.emptyList(); // Simply return nothing. 
@@ -63,147 +80,387 @@ public final List get(DataFetchingEnvironment environment) } private TimeSeriesChart getActiveUsersTimeSeriesChart( - final DateTime beginning, - final DateTime end, - final String title, - final DateInterval interval - ) { + final DateTime beginning, + final DateTime end, + final String title, + final DateInterval interval) { final DateRange dateRange = - new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); + new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); final List timeSeriesLines = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), dateRange, interval, - Optional.empty(), ImmutableMap.of(), Collections.emptyMap(), Optional.of("browserId")); + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + dateRange, + interval, + Optional.empty(), + ImmutableMap.of(), + Collections.emptyMap(), + Optional.of("browserId")); return TimeSeriesChart.builder() - .setTitle(title) - .setDateRange(dateRange) - .setInterval(interval) - .setLines(timeSeriesLines) - .build(); + .setTitle(title) + .setDateRange(dateRange) + .setInterval(interval) + .setLines(timeSeriesLines) + .build(); + } + + @Nullable + private AnalyticsChart getTopUsersChart(OperationContext opContext) { + try { + final DateUtil dateUtil = new DateUtil(); + final DateRange trailingMonthDateRange = dateUtil.getTrailingMonthDateRange(); + final List columns = ImmutableList.of("Name", "Title", "Email"); + + final String topUsersTitle = "Top Users"; + final List topUserRows = + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingMonthDateRange), + "actorUrn.keyword", + Collections.emptyMap(), + ImmutableMap.of( + "actorUrn.keyword", + ImmutableList.of("urn:li:corpuser:admin", "urn:li:corpuser:datahub")), + Optional.empty(), + 30, + AnalyticsUtil::buildCellWithEntityLandingPage); + 
AnalyticsUtil.convertToUserInfoRows(opContext, _entityClient, topUserRows); + return TableChart.builder() + .setTitle(topUsersTitle) + .setColumns(columns) + .setRows(topUserRows) + .build(); + } catch (Exception e) { + log.error("Failed to retrieve top users chart!", e); + return null; + } + } + + private SearchResult searchForNewUsers(@Nonnull final OperationContext opContext) + throws Exception { + // Search for new users in the past month. + final DateUtil dateUtil = new DateUtil(); + final DateRange trailingMonthDateRange = dateUtil.getTrailingMonthDateRange(); + return _entityClient.search( + opContext, + CORP_USER_ENTITY_NAME, + "*", + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(CORP_USER_STATUS_LAST_MODIFIED_FIELD_NAME) + .setCondition(Condition.GREATER_THAN) + .setValue( + String.valueOf( + trailingMonthDateRange.getStart())))))))), + Collections.singletonList( + new SortCriterion() + .setField(CORP_USER_STATUS_LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + 0, + 100); + } + + @Nonnull + private Row buildNewUsersRow(@Nonnull final SearchEntity entity) { + final Row row = new Row(); + row.setValues(ImmutableList.of(entity.getEntity().toString())); + final Cell cell = new Cell(); + cell.setValue(entity.getEntity().toString()); + cell.setEntity(UrnToEntityMapper.map(null, entity.getEntity())); + cell.setLinkParams( + new LinkParams( + null, new EntityProfileParams(entity.getEntity().toString(), EntityType.CORP_USER))); + row.setCells(ImmutableList.of(cell)); + return row; + } + + @Nullable + private AnalyticsChart getNewUsersChart(OperationContext opContext) { + try { + final List columns = ImmutableList.of("Name", "Title", "Email"); + final String newUsersTitle = "New Users"; + final SearchResult result = searchForNewUsers(opContext); + final List newUserRows = new ArrayList<>(); + for (SearchEntity 
entity : result.getEntities()) { + newUserRows.add(buildNewUsersRow(entity)); + } + AnalyticsUtil.convertToUserInfoRows(opContext, _entityClient, newUserRows); + return TableChart.builder() + .setTitle(newUsersTitle) + .setColumns(columns) + .setRows(newUserRows) + .build(); + } catch (Exception e) { + log.error("Failed to retrieve new users chart!", e); + return null; + } } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ - private List getProductAnalyticsCharts(Authentication authentication) throws Exception { + /** TODO: Config Driven Charts Instead of Hardcoded. */ + private List getProductAnalyticsCharts(OperationContext opContext) + throws Exception { final List charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); + final DateTime startOfThisMonth = dateUtil.getStartOfThisMonth(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); - charts.add(getActiveUsersTimeSeriesChart( + // WAU + charts.add( + getActiveUsersTimeSeriesChart( startOfNextWeek.minusWeeks(10), startOfNextWeek.minusMillis(1), "Weekly Active Users", - DateInterval.WEEK - )); - charts.add(getActiveUsersTimeSeriesChart( + DateInterval.WEEK)); + + // MAU + charts.add( + getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), - startOfNextMonth.minusMillis(1), + startOfThisMonth.minusMillis(1), "Monthly Active Users", - DateInterval.MONTH - )); + DateInterval.MONTH)); - String searchesTitle = "Searches Last Week"; + // New users chart - past month + final AnalyticsChart newUsersChart = getNewUsersChart(opContext); + if (newUsersChart != null) { + charts.add(newUsersChart); + } + + // Top users chart - past month + final AnalyticsChart topUsersChart = getTopUsersChart(opContext); + if (topUsersChart != null) { + charts.add(topUsersChart); + } + + String searchesTitle = "Number of Searches"; DateInterval 
dailyInterval = DateInterval.DAY; String searchEventType = "SearchEvent"; final List searchesTimeseries = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), trailingWeekDateRange, dailyInterval, - Optional.empty(), ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + trailingWeekDateRange, + dailyInterval, + Optional.empty(), + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), Optional.empty()); - charts.add(TimeSeriesChart.builder() - .setTitle(searchesTitle) - .setDateRange(trailingWeekDateRange) - .setInterval(dailyInterval) - .setLines(searchesTimeseries) - .build()); - - final String topSearchTitle = "Top Search Queries"; + charts.add( + TimeSeriesChart.builder() + .setTitle(searchesTitle) + .setDateRange(trailingWeekDateRange) + .setInterval(dailyInterval) + .setLines(searchesTimeseries) + .build()); + + final String topSearchTitle = "Top Searches (Past Week)"; final List columns = ImmutableList.of("Query", "Count"); final List topSearchQueries = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "query.keyword", ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), - Optional.empty(), 10, AnalyticsUtil::buildCellWithSearchLandingPage); - charts.add(TableChart.builder().setTitle(topSearchTitle).setColumns(columns).setRows(topSearchQueries).build()); + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "query.keyword", + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithSearchLandingPage); + charts.add( + TableChart.builder() + .setTitle(topSearchTitle) + .setColumns(columns) + .setRows(topSearchQueries) + .build()); - final String 
sectionViewsTitle = "Section Views across Entity Types"; + final String topViewedDatasetsTitle = "Top Viewed Datasets (Past Week)"; + final List columns5 = ImmutableList.of("Name", "View Count"); + final List topViewedDatasets = + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "entityUrn.keyword", + ImmutableMap.of( + "type", + ImmutableList.of("EntityViewEvent"), + "entityType.keyword", + ImmutableList.of(EntityType.DATASET.name())), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithEntityLandingPage); + AnalyticsUtil.hydrateDisplayNameForTable( + opContext, + _entityClient, + topViewedDatasets, + Constants.DATASET_ENTITY_NAME, + ImmutableSet.of( + Constants.DATASET_KEY_ASPECT_NAME, Constants.DATASET_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDatasetName); + charts.add( + TableChart.builder() + .setTitle(topViewedDatasetsTitle) + .setColumns(columns5) + .setRows(topViewedDatasets) + .build()); + + final String topViewedDashboardsTitle = "Top Viewed Dashboards (Past Week)"; + final List columns6 = ImmutableList.of("Name", "View Count"); + final List topViewedDashboards = + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "entityUrn.keyword", + ImmutableMap.of( + "type", + ImmutableList.of("EntityViewEvent"), + "entityType.keyword", + ImmutableList.of(EntityType.DASHBOARD.name())), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithEntityLandingPage); + AnalyticsUtil.hydrateDisplayNameForTable( + opContext, + _entityClient, + topViewedDashboards, + Constants.DASHBOARD_ENTITY_NAME, + ImmutableSet.of(Constants.DASHBOARD_INFO_ASPECT_NAME), + AnalyticsUtil::getDashboardName); + charts.add( + TableChart.builder() + .setTitle(topViewedDashboardsTitle) + .setColumns(columns6) + .setRows(topViewedDashboards) + .build()); + + final String sectionViewsTitle = "Tab Views By 
Entity Type (Past Week)"; final List sectionViewsPerEntityType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "section.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), Collections.emptyMap(), - Optional.empty(), true); - charts.add(BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); + ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), + Collections.emptyMap(), + Optional.empty(), + true); + charts.add( + BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); - final String actionsByTypeTitle = "Actions by Entity Type"; + final String actionsByTypeTitle = "Actions By Entity Type (Past Week)"; final List eventsByEventType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "actionType.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), Collections.emptyMap(), Optional.empty(), + ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), + Collections.emptyMap(), + Optional.empty(), true); charts.add(BarChart.builder().setTitle(actionsByTypeTitle).setBars(eventsByEventType).build()); - final String topViewedTitle = "Top Viewed Dataset"; - final List columns5 = ImmutableList.of("Dataset", "#Views"); - - final List topViewedDatasets = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "entityUrn.keyword", ImmutableMap.of("type", ImmutableList.of("EntityViewEvent"), "entityType.keyword", - ImmutableList.of(EntityType.DATASET.name())), 
Collections.emptyMap(), Optional.empty(), 10, - AnalyticsUtil::buildCellWithEntityLandingPage); - AnalyticsUtil.hydrateDisplayNameForTable(_entityClient, topViewedDatasets, Constants.DATASET_ENTITY_NAME, - ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), AnalyticsUtil::getDatasetName, authentication); - charts.add(TableChart.builder().setTitle(topViewedTitle).setColumns(columns5).setRows(topViewedDatasets).build()); - return charts; } - private List getGlobalMetadataAnalyticsCharts(Authentication authentication) throws Exception { + private List getGlobalMetadataAnalyticsCharts(OperationContext opContext) + throws Exception { final List charts = new ArrayList<>(); // Chart 1: Entities per domain final List entitiesPerDomain = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("domains.keyword", "platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerDomain, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); - AnalyticsUtil.hydrateDisplayNameForSegments(_entityClient, entitiesPerDomain, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("domains.keyword", "platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + opContext, + _entityClient, + entitiesPerDomain, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName); + AnalyticsUtil.hydrateDisplayNameForSegments( + opContext, + 
_entityClient, + entitiesPerDomain, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName); if (!entitiesPerDomain.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); + charts.add( + BarChart.builder().setTitle("Entities By Domain").setBars(entitiesPerDomain).build()); } // Chart 2: Entities per platform final List entitiesPerPlatform = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerPlatform, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + opContext, + _entityClient, + entitiesPerPlatform, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName); if (!entitiesPerPlatform.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Platform").setBars(entitiesPerPlatform).build()); + charts.add( + BarChart.builder().setTitle("Assets By Platform").setBars(entitiesPerPlatform).build()); } // Chart 3: Entities per term final List entitiesPerTerm = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("glossaryTerms.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - 
AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerTerm, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("glossaryTerms.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + opContext, + _entityClient, + entitiesPerTerm, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName); if (!entitiesPerTerm.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Term").setBars(entitiesPerTerm).build()); + charts.add( + BarChart.builder().setTitle("Entities With Term").setBars(entitiesPerTerm).build()); } // Chart 4: Entities per fabric type final List entitiesPerEnv = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("origin.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("origin.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); if (entitiesPerEnv.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); + charts.add( + BarChart.builder().setTitle("Entities By Environment").setBars(entitiesPerEnv).build()); } return charts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java index c631a13b0bcb6d..7000ab7adff5d5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java @@ -14,15 +14,11 @@ import java.util.Map; import java.util.Optional; import java.util.function.Function; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. */ @RequiredArgsConstructor @Slf4j public final class GetHighlightsResolver implements DataFetcher> { @@ -40,69 +36,72 @@ public final List get(DataFetchingEnvironment environment) throws Exc } private Highlight getTimeBasedHighlight( - final String title, - final String changeString, - final DateTime endDateTime, - final Function periodStartFunc - ) { + final String title, + final String changeString, + final DateTime endDateTime, + final Function periodStartFunc) { DateTime startDate = periodStartFunc.apply(endDateTime); DateTime timeBeforeThat = periodStartFunc.apply(startDate); - DateRange dateRangeThis = new DateRange( - String.valueOf(startDate.getMillis()), - String.valueOf(endDateTime.getMillis()) - ); - DateRange dateRangeLast = new DateRange( - String.valueOf(timeBeforeThat.getMillis()), - String.valueOf(startDate.getMillis()) - ); - - int activeUsersThisRange = _analyticsService.getHighlights( + DateRange dateRangeThis = + new DateRange( + String.valueOf(startDate.getMillis()), String.valueOf(endDateTime.getMillis())); + DateRange dateRangeLast = + new DateRange( + String.valueOf(timeBeforeThat.getMillis()), String.valueOf(startDate.getMillis())); + + int 
activeUsersThisRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeThis), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); - int activeUsersLastRange = _analyticsService.getHighlights( + Optional.of("browserId")); + int activeUsersLastRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeLast), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); + Optional.of("browserId")); String bodyText = ""; if (activeUsersLastRange > 0) { - double percentChange = (double) (activeUsersThisRange - activeUsersLastRange) - / (double) activeUsersLastRange * 100; + double percentChange = + (double) (activeUsersThisRange - activeUsersLastRange) + / (double) activeUsersLastRange + * 100; String directionChange = percentChange > 0 ? "increase" : "decrease"; - bodyText = Double.isInfinite(percentChange) ? "" + bodyText = + Double.isInfinite(percentChange) + ? "" : String.format(changeString, percentChange, directionChange); } - return Highlight.builder().setTitle(title).setValue(activeUsersThisRange).setBody(bodyText).build(); + return Highlight.builder() + .setTitle(title) + .setValue(activeUsersThisRange) + .setBody(bodyText) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ + /** TODO: Config Driven Charts Instead of Hardcoded. 
*/ private List getHighlights() { final List highlights = new ArrayList<>(); DateTime endDate = DateTime.now(); - highlights.add(getTimeBasedHighlight( + highlights.add( + getTimeBasedHighlight( "Weekly Active Users", "%.2f%% %s from last week", endDate, - (date) -> date.minusWeeks(1) - )); - highlights.add(getTimeBasedHighlight( + (date) -> date.minusWeeks(1))); + highlights.add( + getTimeBasedHighlight( "Monthly Active Users", "%.2f%% %s from last month", endDate, - (date) -> date.minusMonths(1) - )); + (date) -> date.minusMonths(1))); // Entity metdata statistics getEntityMetadataStats("Datasets", EntityType.DATASET).ifPresent(highlights::add); @@ -121,10 +120,13 @@ private Optional getEntityMetadataStats(String title, EntityType enti if (numEntities == 0) { return Optional.empty(); } - int numEntitiesWithOwners = getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); - int numEntitiesWithTags = getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); + int numEntitiesWithOwners = + getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); + int numEntitiesWithTags = + getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); int numEntitiesWithGlossaryTerms = - getNumEntitiesFiltered(index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); + getNumEntitiesFiltered( + index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); int numEntitiesWithDescription = getNumEntitiesFiltered(index, ImmutableMap.of("hasDescription", ImmutableList.of("true"))); @@ -137,22 +139,36 @@ private Optional getEntityMetadataStats(String title, EntityType enti if (entityType == EntityType.DOMAIN) { // Don't show percent with domain when asking for stats regarding domains bodyText = - String.format("%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", - percentWithOwners, percentWithTags, 
percentWithGlossaryTerms, percentWithDescription); + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription); } else { int numEntitiesWithDomains = getNumEntitiesFiltered(index, ImmutableMap.of("hasDomain", ImmutableList.of("true"))); double percentWithDomains = 100.0 * numEntitiesWithDomains / numEntities; - bodyText = String.format( - "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription, percentWithDomains); + bodyText = + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription, + percentWithDomains); } } - return Optional.of(Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); + return Optional.of( + Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); } private int getNumEntitiesFiltered(String index, Map> filters) { - return _analyticsService.getHighlights(index, Optional.empty(), filters, - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty()); + return _analyticsService.getHighlights( + index, + Optional.empty(), + filters, + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index f61c2eb77739bf..6045b1e726c7a5 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.analytics.resolver; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.analytics.service.AnalyticsUtil; import com.linkedin.datahub.graphql.generated.AnalyticsChart; import com.linkedin.datahub.graphql.generated.AnalyticsChartGroup; @@ -10,8 +12,7 @@ import com.linkedin.datahub.graphql.generated.BarSegment; import com.linkedin.datahub.graphql.generated.MetadataAnalyticsInput; import com.linkedin.datahub.graphql.generated.NamedBar; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Filter; @@ -20,6 +21,7 @@ import com.linkedin.metadata.search.utils.QueryUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -30,12 +32,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. 
- */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. */ @RequiredArgsConstructor @Slf4j public final class GetMetadataAnalyticsResolver implements DataFetcher> { @@ -44,14 +41,15 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final Authentication authentication = ResolverUtils.getAuthentication(environment); - final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); + final QueryContext context = environment.getContext(); + final MetadataAnalyticsInput input = + bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); try { final AnalyticsChartGroup group = new AnalyticsChartGroup(); group.setGroupId("FilteredMetadataAnalytics"); group.setTitle(""); - group.setCharts(getCharts(input, authentication)); + group.setCharts(getCharts(input, context.getOperationContext())); return ImmutableList.of(group); } catch (Exception e) { log.error("Failed to retrieve metadata analytics!", e); @@ -59,7 +57,8 @@ public final List get(DataFetchingEnvironment environment) } } - private List getCharts(MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List getCharts(MetadataAnalyticsInput input, OperationContext opContext) + throws Exception { final List charts = new ArrayList<>(); List entities = Collections.emptyList(); @@ -77,48 +76,76 @@ private List getCharts(MetadataAnalyticsInput input, Authenticat filter = QueryUtils.newFilter("domains.keyword", input.getDomain()); } - SearchResult searchResult = _entityClient.searchAcrossEntities(entities, query, filter, 0, 0, - null, null, authentication); + SearchResult searchResult = + _entityClient.searchAcrossEntities( + opContext, entities, query, filter, 0, 0, Collections.emptyList(), null); - List aggregationMetadataList = searchResult.getMetadata().getAggregations(); + List 
aggregationMetadataList = + searchResult.getMetadata().getAggregations(); Optional domainAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("domains")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("domains")) + .findFirst(); if (StringUtils.isEmpty(input.getDomain()) && domainAggregation.isPresent()) { List domainChart = buildBarChart(domainAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, domainChart, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + opContext, + _entityClient, + domainChart, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName); + charts.add(BarChart.builder().setTitle("Data Assets by Domain").setBars(domainChart).build()); } Optional platformAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("platform")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("platform")) + .findFirst(); if (platformAggregation.isPresent()) { List platformChart = buildBarChart(platformAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, platformChart, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + opContext, + _entityClient, + platformChart, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName); + charts.add( + 
BarChart.builder().setTitle("Data Assets by Platform").setBars(platformChart).build()); } Optional termAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("glossaryTerms")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("glossaryTerms")) + .findFirst(); if (termAggregation.isPresent()) { List termChart = buildBarChart(termAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, termChart, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + opContext, + _entityClient, + termChart, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName); + charts.add(BarChart.builder().setTitle("Data Assets by Term").setBars(termChart).build()); } Optional envAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("origin")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("origin")) + .findFirst(); if (envAggregation.isPresent()) { List termChart = buildBarChart(envAggregation.get()); if (termChart.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); + charts.add( + BarChart.builder().setTitle("Data Assets by Environment").setBars(termChart).build()); } } @@ -126,16 +153,20 @@ private List getCharts(MetadataAnalyticsInput input, Authenticat } private List buildBarChart(AggregationMetadata aggregation) { - return aggregation.getAggregations() - .entrySet() - .stream() + return aggregation.getAggregations().entrySet().stream() 
.sorted(Collections.reverseOrder(Map.Entry.comparingByValue())) .limit(10) - .map(entry -> NamedBar.builder() - .setName(entry.getKey()) - .setSegments(ImmutableList.of( - BarSegment.builder().setLabel("#Entities").setValue(entry.getValue().intValue()).build())) - .build()) + .map( + entry -> + NamedBar.builder() + .setName(entry.getKey()) + .setSegments( + ImmutableList.of( + BarSegment.builder() + .setLabel("Count") + .setValue(entry.getValue().intValue()) + .build())) + .build()) .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java index 8e3bffc9ccf08d..c7f5c0bbc63eb5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java @@ -3,20 +3,17 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -/** - * Returns true if analytics feature flag is enabled, false otherwise. - */ +/** Returns true if analytics feature flag is enabled, false otherwise. 
*/ public class IsAnalyticsEnabledResolver implements DataFetcher { private final Boolean _isAnalyticsEnabled; public IsAnalyticsEnabledResolver(final Boolean isAnalyticsEnabled) { - _isAnalyticsEnabled = isAnalyticsEnabled; + _isAnalyticsEnabled = isAnalyticsEnabled; } @Override public final Boolean get(DataFetchingEnvironment environment) throws Exception { - return _isAnalyticsEnabled; + return _isAnalyticsEnabled; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 4135a7b0da1482..72643ccac6325c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -10,7 +10,7 @@ import com.linkedin.datahub.graphql.generated.NamedLine; import com.linkedin.datahub.graphql.generated.NumericDataPoint; import com.linkedin.datahub.graphql.generated.Row; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import java.util.List; import java.util.Map; @@ -40,7 +40,6 @@ import org.opensearch.search.aggregations.metrics.Cardinality; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class AnalyticsService { @@ -72,25 +71,35 @@ public String getUsageIndexName() { return _indexConvention.getIndexName(DATAHUB_USAGE_EVENT_INDEX); } - public List getTimeseriesChart(String indexName, DateRange dateRange, DateInterval granularity, + public List getTimeseriesChart( + String indexName, + DateRange dateRange, + DateInterval granularity, Optional dimension, // Length 1 for now - Map> filters, Map> mustNotFilters, Optional uniqueOn) { + Map> filters, + 
Map> mustNotFilters, + Optional uniqueOn) { log.debug( - String.format("Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", - indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + String.format("filters: %s, uniqueOn: %s", filters, - uniqueOn)); - - AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); - - AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(DATE_HISTOGRAM) - .field("timestamp") - .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); + String.format( + "Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", + indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + + AggregationBuilder filteredAgg = + getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); + + AggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(DATE_HISTOGRAM) + .field("timestamp") + .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); uniqueOn.ifPresent(s -> dateHistogram.subAggregation(getUniqueQuery(s))); if (dimension.isPresent()) { filteredAgg.subAggregation( - AggregationBuilders.terms(DIMENSION).field(dimension.get()).subAggregation(dateHistogram)); + AggregationBuilders.terms(DIMENSION) + .field(dimension.get()) + .subAggregation(dateHistogram)); } else { filteredAgg.subAggregation(dateHistogram); } @@ -99,39 +108,55 @@ public List getTimeseriesChart(String indexName, DateRange dateRange, Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { if (dimension.isPresent()) { - return aggregationResult.get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedLine(bucket.getKeyAsString(), - extractPointsFromAggregations(bucket.getAggregations(), uniqueOn.isPresent()))) + 
return aggregationResult.get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedLine( + bucket.getKeyAsString(), + extractPointsFromAggregations( + bucket.getAggregations(), uniqueOn.isPresent()))) .collect(Collectors.toList()); } else { return ImmutableList.of( - new NamedLine("total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); + new NamedLine( + "total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); } } catch (Exception e) { - log.error(String.format("Caught exception while getting time series chart: %s", e.getMessage())); + log.error( + String.format("Caught exception while getting time series chart: %s", e.getMessage())); return ImmutableList.of(); } } private int extractCount(MultiBucketsAggregation.Bucket bucket, boolean didUnique) { - return didUnique ? (int) bucket.getAggregations().get(UNIQUE).getValue() : (int) bucket.getDocCount(); + return didUnique + ? (int) bucket.getAggregations().get(UNIQUE).getValue() + : (int) bucket.getDocCount(); } - private List extractPointsFromAggregations(Aggregations aggregations, boolean didUnique) { - return aggregations.get(DATE_HISTOGRAM).getBuckets() - .stream() - .map(bucket -> new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) + private List extractPointsFromAggregations( + Aggregations aggregations, boolean didUnique) { + return aggregations.get(DATE_HISTOGRAM).getBuckets().stream() + .map( + bucket -> + new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } - public List getBarChart(String indexName, Optional dateRange, List dimensions, + public List getBarChart( + String indexName, + Optional dateRange, + List dimensions, // Length 1 or 2 - Map> filters, Map> mustNotFilters, Optional uniqueOn, + Map> filters, + Map> mustNotFilters, + Optional uniqueOn, boolean showMissing) { log.debug( - String.format("Invoked getBarChart with indexName: %s, dateRange: %s, 
dimensions: %s,", indexName, dateRange, - dimensions) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", + indexName, dateRange, dimensions) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); assert (dimensions.size() == 1 || dimensions.size() == 2); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); @@ -142,7 +167,8 @@ public List getBarChart(String indexName, Optional dateRang } if (dimensions.size() == 2) { - TermsAggregationBuilder secondTermAgg = AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); + TermsAggregationBuilder secondTermAgg = + AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); if (showMissing) { secondTermAgg.missing(NA); } @@ -161,14 +187,24 @@ public List getBarChart(String indexName, Optional dateRang List barSegments = extractBarSegmentsFromAggregations(aggregationResult, DIMENSION, uniqueOn.isPresent()); return barSegments.stream() - .map(segment -> new NamedBar(segment.getLabel(), - ImmutableList.of(BarSegment.builder().setLabel("Count").setValue(segment.getValue()).build()))) + .map( + segment -> + new NamedBar( + segment.getLabel(), + ImmutableList.of( + BarSegment.builder() + .setLabel("Count") + .setValue(segment.getValue()) + .build()))) .collect(Collectors.toList()); } else { - return aggregationResult.get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedBar(bucket.getKeyAsString(), - extractBarSegmentsFromAggregations(bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) + return aggregationResult.get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedBar( + bucket.getKeyAsString(), + extractBarSegmentsFromAggregations( + bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) .collect(Collectors.toList()); } } catch (Exception e) { @@ -177,31 +213,42 @@ public List 
getBarChart(String indexName, Optional dateRang } } - private List extractBarSegmentsFromAggregations(Aggregations aggregations, String aggregationKey, - boolean didUnique) { - return aggregations.get(aggregationKey).getBuckets() - .stream() + private List extractBarSegmentsFromAggregations( + Aggregations aggregations, String aggregationKey, boolean didUnique) { + return aggregations.get(aggregationKey).getBuckets().stream() .map(bucket -> new BarSegment(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } - public Row buildRow(String groupByValue, Function groupByValueToCell, int count) { + public static Row buildRow( + String groupByValue, Function groupByValueToCell, int count) { List values = ImmutableList.of(groupByValue, String.valueOf(count)); - List cells = ImmutableList.of(groupByValueToCell.apply(groupByValue), - Cell.builder().setValue(String.valueOf(count)).build()); + List cells = + ImmutableList.of( + groupByValueToCell.apply(groupByValue), + Cell.builder().setValue(String.valueOf(count)).build()); return new Row(values, cells); } - public List getTopNTableChart(String indexName, Optional dateRange, String groupBy, - Map> filters, Map> mustNotFilters, Optional uniqueOn, - int maxRows, Function groupByValueToCell) { + public List getTopNTableChart( + String indexName, + Optional dateRange, + String groupBy, + Map> filters, + Map> mustNotFilters, + Optional uniqueOn, + int maxRows, + Function groupByValueToCell) { log.debug( - String.format("Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", indexName, dateRange, - groupBy) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", + indexName, dateRange, groupBy) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); - 
TermsAggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); + TermsAggregationBuilder termAgg = + AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); if (uniqueOn.isPresent()) { termAgg.order(BucketOrder.aggregation(UNIQUE, false)); termAgg.subAggregation(getUniqueQuery(uniqueOn.get())); @@ -212,10 +259,13 @@ public List getTopNTableChart(String indexName, Optional dateRan Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { - return aggregationResult.get(DIMENSION).getBuckets() - .stream() - .map(bucket -> buildRow(bucket.getKeyAsString(), groupByValueToCell, - extractCount(bucket, uniqueOn.isPresent()))) + return aggregationResult.get(DIMENSION).getBuckets().stream() + .map( + bucket -> + buildRow( + bucket.getKeyAsString(), + groupByValueToCell, + extractCount(bucket, uniqueOn.isPresent()))) .collect(Collectors.toList()); } catch (Exception e) { log.error(String.format("Caught exception while getting top n chart: %s", e.getMessage())); @@ -223,11 +273,16 @@ public List getTopNTableChart(String indexName, Optional dateRan } } - public int getHighlights(String indexName, Optional dateRange, Map> filters, - Map> mustNotFilters, Optional uniqueOn) { + public int getHighlights( + String indexName, + Optional dateRange, + Map> filters, + Map> mustNotFilters, + Optional uniqueOn) { log.debug( - String.format("Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + String.format( - "filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); uniqueOn.ifPresent(s -> filteredAgg.subAggregation(getUniqueQuery(s))); @@ -246,7 +301,8 @@ public int getHighlights(String indexName, Optional dateRange, 
Mapget(FILTERED); } catch (Exception e) { @@ -266,11 +323,14 @@ private Filter executeAndExtract(SearchRequest searchRequest) { } } - private AggregationBuilder getFilteredAggregation(Map> mustFilters, - Map> mustNotFilters, Optional dateRange) { + private AggregationBuilder getFilteredAggregation( + Map> mustFilters, + Map> mustNotFilters, + Optional dateRange) { BoolQueryBuilder filteredQuery = QueryBuilders.boolQuery(); mustFilters.forEach((key, values) -> filteredQuery.must(QueryBuilders.termsQuery(key, values))); - mustNotFilters.forEach((key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); + mustNotFilters.forEach( + (key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); dateRange.ifPresent(range -> filteredQuery.must(dateRangeQuery(range))); return AggregationBuilders.filter(FILTERED, filteredQuery); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java index 42f4e25c010efa..a17745948eb823 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.analytics.service; -import com.datahub.authentication.Authentication; +import static com.linkedin.metadata.Constants.CORP_USER_INFO_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.dashboard.DashboardInfo; import com.linkedin.datahub.graphql.generated.BarSegment; import com.linkedin.datahub.graphql.generated.Cell; import com.linkedin.datahub.graphql.generated.Entity; @@ -12,15 +16,18 @@ import com.linkedin.datahub.graphql.generated.SearchParams; import 
com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.dataplatform.DataPlatformInfo; +import com.linkedin.dataset.DatasetProperties; import com.linkedin.domain.DomainProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.identity.CorpUserInfo; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.key.DatasetKey; import com.linkedin.metadata.key.GlossaryTermKey; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; +import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -28,19 +35,22 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - @Slf4j public class AnalyticsUtil { - private AnalyticsUtil() { - } + private AnalyticsUtil() {} public static Cell buildCellWithSearchLandingPage(String query) { Cell result = new Cell(); result.setValue(query); - result.setLinkParams(LinkParams.builder().setSearchParams(SearchParams.builder().setQuery(query).build()).build()); + result.setLinkParams( + LinkParams.builder() + .setSearchParams(SearchParams.builder().setQuery(query).build()) + .build()); return result; } @@ -48,72 +58,194 @@ public static Cell buildCellWithEntityLandingPage(String urn) { Cell result = new Cell(); result.setValue(urn); try { - Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn)); + Entity entity = UrnToEntityMapper.map(null, Urn.createFromString(urn)); result.setEntity(entity); - result.setLinkParams(LinkParams.builder() - .setEntityProfileParams(EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) - .build()); + 
result.setLinkParams( + LinkParams.builder() + .setEntityProfileParams( + EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) + .build()); } catch (URISyntaxException e) { log.error("Malformed urn {} in table", urn, e); } return result; } - public static void hydrateDisplayNameForBars(EntityClient entityClient, List bars, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { + public static void hydrateDisplayNameForBars( + @Nonnull OperationContext opContext, + EntityClient entityClient, + List bars, + String entityName, + Set aspectNames, + Function> extractDisplayName) + throws Exception { Map urnToDisplayName = - getUrnToDisplayName(entityClient, bars.stream().map(NamedBar::getName).collect(Collectors.toList()), entityName, - aspectNames, extractDisplayName, authentication); + getUrnToDisplayName( + opContext, + entityClient, + bars.stream().map(NamedBar::getName).collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.setName(urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); + bars.forEach( + namedBar -> + namedBar.setName( + urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); } - public static void hydrateDisplayNameForSegments(EntityClient entityClient, List bars, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { - Map urnToDisplayName = getUrnToDisplayName(entityClient, - bars.stream().flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)).collect(Collectors.toList()), - entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForSegments( + @Nonnull OperationContext opContext, + EntityClient entityClient, + List bars, + String entityName, + Set aspectNames, + Function> 
extractDisplayName) + throws Exception { + Map urnToDisplayName = + getUrnToDisplayName( + opContext, + entityClient, + bars.stream() + .flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)) + .collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.getSegments() - .forEach(segment -> segment.setLabel(urnToDisplayName.getOrDefault(segment.getLabel(), segment.getLabel())))); + bars.forEach( + namedBar -> + namedBar + .getSegments() + .forEach( + segment -> + segment.setLabel( + urnToDisplayName.getOrDefault( + segment.getLabel(), segment.getLabel())))); } - public static void hydrateDisplayNameForTable(EntityClient entityClient, List rows, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { - Map urnToDisplayName = getUrnToDisplayName(entityClient, rows.stream() - .flatMap(row -> row.getCells().stream().filter(cell -> cell.getEntity() != null).map(Cell::getValue)) - .collect(Collectors.toList()), entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForTable( + @Nonnull OperationContext opContext, + EntityClient entityClient, + List rows, + String entityName, + Set aspectNames, + Function> extractDisplayName) + throws Exception { + Map urnToDisplayName = + getUrnToDisplayName( + opContext, + entityClient, + rows.stream() + .flatMap( + row -> + row.getCells().stream() + .filter(cell -> cell.getEntity() != null) + .map(Cell::getValue)) + .collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName); // For each urn, try to find it's name, use the urn if not found - rows.forEach(row -> row.getCells().forEach(cell -> { - if (cell.getEntity() != null) { - cell.setValue(urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue())); - } - })); + rows.forEach( + row -> + row.getCells() + .forEach( + 
cell -> { + if (cell.getEntity() != null) { + cell.setValue( + urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue())); + } + })); } - public static Map getUrnToDisplayName(EntityClient entityClient, List urns, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { - Set uniqueUrns = urns.stream().distinct().map(urnStr -> { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - return null; - } - }).filter(Objects::nonNull).collect(Collectors.toSet()); - Map aspects = entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication); - return aspects.entrySet() - .stream() - .map(entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue()))) + public static void convertToUserInfoRows( + OperationContext opContext, EntityClient entityClient, List rows) throws Exception { + final Set userUrns = + rows.stream() + .filter(row -> !row.getCells().isEmpty()) + .map(row -> UrnUtils.getUrn(row.getCells().get(0).getValue())) + .collect(Collectors.toSet()); + final Map gmsResponseByUser = + entityClient.batchGetV2( + opContext, + CORP_USER_INFO_ASPECT_NAME, + userUrns, + ImmutableSet.of(CORP_USER_INFO_ASPECT_NAME)); + final Map urnToCorpUserInfo = + gmsResponseByUser.entrySet().stream() + .filter( + entry -> + entry.getValue() != null + && entry.getValue().getAspects().containsKey(CORP_USER_INFO_ASPECT_NAME)) + .collect( + Collectors.toMap( + Map.Entry::getKey, + entry -> + new CorpUserInfo( + entry + .getValue() + .getAspects() + .get(CORP_USER_INFO_ASPECT_NAME) + .getValue() + .data()))); + // Populate a row with the user link, title, and email. + rows.forEach( + row -> { + Urn urn = UrnUtils.getUrn(row.getCells().get(0).getValue()); + EntityResponse response = gmsResponseByUser.get(urn); + String maybeDisplayName = response != null ? getUserName(response).orElse(null) : null; + String maybeEmail = + urnToCorpUserInfo.containsKey(urn) ? 
urnToCorpUserInfo.get(urn).getEmail() : null; + String maybeTitle = + urnToCorpUserInfo.containsKey(urn) ? urnToCorpUserInfo.get(urn).getTitle() : null; + if (maybeDisplayName != null) { + row.getCells().get(0).setValue(maybeDisplayName); + } + final List newCells = new ArrayList<>(); + // First add the user cell + newCells.add(row.getCells().get(0)); + // Then, add the title row. + newCells.add(new Cell(maybeTitle != null ? maybeTitle : "None", null, null)); + // Finally, add the email row. + newCells.add(new Cell(maybeEmail != null ? maybeEmail : "None", null, null)); + row.setCells(newCells); + }); + } + + public static Map getUrnToDisplayName( + @Nonnull OperationContext opContext, + EntityClient entityClient, + List urns, + String entityName, + Set aspectNames, + Function> extractDisplayName) + throws Exception { + Set uniqueUrns = + urns.stream() + .distinct() + .map( + urnStr -> { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + Map aspects = + entityClient.batchGetV2(opContext, entityName, uniqueUrns, aspectNames); + return aspects.entrySet().stream() + .map( + entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue()))) .filter(pair -> pair.getValue().isPresent()) .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().get())); } public static Optional getDomainName(EntityResponse entityResponse) { - EnvelopedAspect domainProperties = entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + EnvelopedAspect domainProperties = + entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (domainProperties == null) { return Optional.empty(); } @@ -124,24 +256,67 @@ public static Optional getPlatformName(EntityResponse entityResponse) { EnvelopedAspect envelopedDataPlatformInfo = entityResponse.getAspects().get(Constants.DATA_PLATFORM_INFO_ASPECT_NAME); if 
(envelopedDataPlatformInfo == null) { + return Optional.of(entityResponse.getUrn().getId()); + } + DataPlatformInfo dataPlatformInfo = + new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data()); + final String infoDisplayName = + dataPlatformInfo.getDisplayName() == null + ? dataPlatformInfo.getName() + : dataPlatformInfo.getDisplayName(); + return Optional.of(infoDisplayName != null ? infoDisplayName : entityResponse.getUrn().getId()); + } + + public static Optional getDatasetName(EntityResponse entityResponse) { + EnvelopedAspect envelopedDatasetProperties = + entityResponse.getAspects().get(Constants.DATASET_PROPERTIES_ASPECT_NAME); + if (envelopedDatasetProperties == null) { return Optional.empty(); } - DataPlatformInfo dataPlatformInfo = new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data()); + DatasetProperties datasetProperties = + new DatasetProperties(envelopedDatasetProperties.getValue().data()); return Optional.of( - dataPlatformInfo.getDisplayName() == null ? dataPlatformInfo.getName() : dataPlatformInfo.getDisplayName()); + datasetProperties.hasName() + ? 
datasetProperties.getName() + : entityResponse.getUrn().getEntityKey().get(1)); } - public static Optional getDatasetName(EntityResponse entityResponse) { - EnvelopedAspect envelopedDatasetKey = entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); - if (envelopedDatasetKey == null) { + public static Optional getDashboardName(EntityResponse entityResponse) { + EnvelopedAspect envelopedDashboardName = + entityResponse.getAspects().get(Constants.DASHBOARD_INFO_ASPECT_NAME); + if (envelopedDashboardName == null) { return Optional.empty(); } - DatasetKey datasetKey = new DatasetKey(envelopedDatasetKey.getValue().data()); - return Optional.of(datasetKey.getName()); + DashboardInfo dashboardInfo = new DashboardInfo(envelopedDashboardName.getValue().data()); + return Optional.of(dashboardInfo.getTitle()); + } + + public static Optional getUserName(EntityResponse entityResponse) { + EnvelopedAspect envelopedCorpUserInfo = + entityResponse.getAspects().get(CORP_USER_INFO_ASPECT_NAME); + if (envelopedCorpUserInfo == null) { + return Optional.of(entityResponse.getUrn().getId()); + } + CorpUserInfo corpUserInfo = new CorpUserInfo(envelopedCorpUserInfo.getValue().data()); + final String userInfoName = + corpUserInfo.hasDisplayName() + ? corpUserInfo.getDisplayName() + : getUserFullName(corpUserInfo.getFirstName(), corpUserInfo.getLastName()); + return Optional.of(userInfoName != null ? 
userInfoName : entityResponse.getUrn().getId()); + } + + @Nullable + private static String getUserFullName( + @Nullable final String firstName, @Nullable final String lastName) { + if (firstName != null && lastName != null) { + return firstName + " " + lastName; + } + return null; } public static Optional getTermName(EntityResponse entityResponse) { - EnvelopedAspect envelopedTermInfo = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + EnvelopedAspect envelopedTermInfo = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); if (envelopedTermInfo != null) { GlossaryTermInfo glossaryTermInfo = new GlossaryTermInfo(envelopedTermInfo.getValue().data()); if (glossaryTermInfo.hasName()) { @@ -150,11 +325,13 @@ public static Optional getTermName(EntityResponse entityResponse) { } // if name is not set on GlossaryTermInfo or there is no GlossaryTermInfo - EnvelopedAspect envelopedGlossaryTermKey = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); + EnvelopedAspect envelopedGlossaryTermKey = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); if (envelopedGlossaryTermKey == null) { return Optional.empty(); } - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); + GlossaryTermKey glossaryTermKey = + new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); return Optional.of(glossaryTermKey.getName()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 03e63c7fb472fa..4fb49d79a0aa70 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -1,192 +1,406 @@ package 
com.linkedin.datahub.graphql.authorization; +import static com.datahub.authorization.AuthUtil.VIEW_RESTRICTED_ENTITY_TYPES; +import static com.datahub.authorization.AuthUtil.canViewEntity; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; +import static com.linkedin.metadata.authorization.PoliciesConfig.MANAGE_ACCESS_TOKENS; + import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; - -import java.time.Clock; +import io.datahubproject.metadata.context.OperationContext; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.List; -import java.util.Optional; +import java.util.Set; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.reflect.ConstructorUtils; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.commons.lang3.reflect.MethodUtils; +import org.codehaus.plexus.util.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - +@Slf4j public class AuthorizationUtils { - private static final Clock CLOCK = Clock.systemUTC(); + private static final String GRAPHQL_GENERATED_PACKAGE = "com.linkedin.datahub.graphql.generated"; - public static AuditStamp 
createAuditStamp(@Nonnull QueryContext context) { - return new AuditStamp().setTime(CLOCK.millis()).setActor(UrnUtils.getUrn(context.getActorUrn())); - } + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(CORP_USER_ENTITY_NAME, CORP_GROUP_ENTITY_NAME)); } public static boolean canManagePolicies(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE) + || AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), MANAGE_ACCESS_TOKENS); } public static boolean canManageTokens(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(ACCESS_TOKEN_ENTITY_NAME)); } /** - * Returns true if the current used is able to create Domains. This is true if the user has the 'Manage Domains' or 'Create Domains' platform privilege. + * Returns true if the current used is able to create Domains. 
This is true if the user has the + * 'Manage Domains' or 'Create Domains' platform privilege. */ public static boolean canCreateDomains(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())))); + + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageDomains(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } /** - * Returns true if the current used is able to create Tags. This is true if the user has the 'Manage Tags' or 'Create Tags' platform privilege. + * Returns true if the current used is able to create Tags. This is true if the user has the + * 'Manage Tags' or 'Create Tags' platform privilege. 
*/ public static boolean canCreateTags(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())))); + + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageTags(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TAGS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TAGS_PRIVILEGE); } public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { - return isAuthorized(context, Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), PoliciesConfig.DELETE_ENTITY_PRIVILEGE); + return AuthUtil.isAuthorizedEntityUrns( + context.getAuthorizer(), context.getActorUrn(), DELETE, List.of(entityUrn)); } public static boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); - } - - public static boolean canEditGroupMembers(@Nonnull String groupUrnStr, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - 
new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized(context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, - groupUrnStr, orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + public static boolean canEditGroupMembers( + @Nonnull String groupUrnStr, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( + return isAuthorized( context.getAuthorizer(), context.getActorUrn(), + CORP_GROUP_ENTITY_NAME, + groupUrnStr, orPrivilegeGroups); } - public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), + new 
ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); + + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); + } - return AuthorizationUtils.isAuthorized( + public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( context.getAuthorizer(), context.getActorUrn(), - orPrivilegeGroups); + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE); } public static boolean canManageGlobalViews(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_VIEWS); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOBAL_VIEWS); } public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); } - public static boolean canEditEntityQueries(@Nonnull List entityUrns, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); - return entityUrns.stream().allMatch(entityUrn -> - isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups - )); + public static boolean canEditProperties(@Nonnull Urn targetUrn, @Nonnull QueryContext context) { + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PROPERTIES_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); + } + + public static boolean canEditEntityQueries( + @Nonnull List entityUrns, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); + return entityUrns.stream() + .allMatch( + entityUrn -> + isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + entityUrn.getEntityType(), + entityUrn.toString(), + orPrivilegeGroups)); } - public static boolean canCreateQuery(@Nonnull List subjectUrns, @Nonnull QueryContext context) { + public static boolean canCreateQuery( + @Nonnull List subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to create a query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canUpdateQuery(@Nonnull List subjectUrns, @Nonnull QueryContext context) { + public static boolean canUpdateQuery( + @Nonnull List subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to update any query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canDeleteQuery(@Nonnull Urn entityUrn, @Nonnull List subjectUrns, @Nonnull QueryContext context) { + public static boolean canDeleteQuery( + @Nonnull Urn entityUrn, @Nonnull List subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to remove any query. 
return canEditEntityQueries(subjectUrns, context); } - public static boolean isAuthorized( - @Nonnull QueryContext context, - @Nonnull Optional resourceSpec, - @Nonnull PoliciesConfig.Privilege privilege) { - final Authorizer authorizer = context.getAuthorizer(); - final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + /** + * Can view relationship logic goes here. Should be considered directionless for now. Or direction + * added to the interface. + * + * @param opContext + * @param a + * @param b + * @return + */ + public static boolean canViewRelationship( + @Nonnull OperationContext opContext, @Nonnull Urn a, @Nonnull Urn b) { + // TODO relationships filter + return true; } - public static boolean isAuthorized( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { - return AuthUtil.isAuthorized(authorizer, actor, Optional.empty(), privilegeGroup); + /* + * Optionally check view permissions against a list of urns if the config option is enabled + */ + public static boolean canView(@Nonnull OperationContext opContext, @Nonnull Urn urn) { + // if search authorization is disabled, skip the view permission check + if (opContext.getOperationContextConfig().getViewAuthorizationConfiguration().isEnabled() + && !opContext.isSystemAuth() + && VIEW_RESTRICTED_ENTITY_TYPES.contains(urn.getEntityType())) { + + return opContext + .getViewAuthorizationContext() + .map( + viewAuthContext -> { + + // check cache + if (viewAuthContext.canView(Set.of(urn))) { + return true; + } + + if (!canViewEntity( + opContext.getSessionAuthentication().getActor().toUrnStr(), + opContext.getAuthorizerContext().getAuthorizer(), + urn)) { + return false; + } + + // cache viewable urn + 
viewAuthContext.addViewableUrns(Set.of(urn)); + return true; + }) + .orElse(false); + } + return true; + } + + public static T restrictEntity(@Nonnull Object entity, Class clazz) { + List allFields = FieldUtils.getAllFieldsList(entity.getClass()); + try { + Object[] args = + allFields.stream() + .map( + field -> { + // properties are often not required but only because + // they are a `one of` non-null. + // i.e. ChartProperties or ChartEditableProperties are required. + if (field.getAnnotation(javax.annotation.Nonnull.class) != null + || field.getName().toLowerCase().contains("properties") + || field.getType().isPrimitive()) { + try { + switch (field.getName()) { + // pass through to the restricted entity + case "name": + case "type": + case "urn": + case "chartId": + case "id": + case "jobId": + case "flowId": + Method fieldGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return fieldGetter.invoke(entity, (Object[]) null); + default: + switch (field.getType().getSimpleName()) { + case "boolean": + case "Boolean": + Method boolGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return Boolean.TRUE.equals( + boolGetter.invoke(entity, (Object[]) null)); + // mask these fields in the restricted entity + case "char": + case "String": + return ""; + case "short": + case "Short": + case "int": + case "Integer": + return 0; + case "long": + case "Long": + return 0L; + case "float": + case "Float": + return 0F; + case "double": + case "Double": + return 0D; + case "List": + return List.of(); + default: + if (Enum.class.isAssignableFrom(field.getType())) { + // pass through enum + Method enumGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return enumGetter.invoke(entity, (Object[]) null); + } else if (entity + .getClass() + .getPackage() + .getName() + 
.contains(GRAPHQL_GENERATED_PACKAGE)) { + // handle nested fields recursively + Method getter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + Object nestedEntity = getter.invoke(entity, (Object[]) null); + if (nestedEntity == null) { + return null; + } else { + return restrictEntity(nestedEntity, getter.getReturnType()); + } + } + log.error( + String.format( + "Failed to resolve non-null field: Object:%s Field:%s FieldType: %s", + entity.getClass().getName(), + field.getName(), + field.getType().getName())); + } + } + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + } + return (Object) null; + }) + .toArray(); + return ConstructorUtils.invokeConstructor(clazz, args); + } catch (NoSuchMethodException + | IllegalAccessException + | InvocationTargetException + | InstantiationException e) { + throw new RuntimeException(e); + } + } + + public static boolean canManageStructuredProperties(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_STRUCTURED_PROPERTIES_PRIVILEGE); + } + + public static boolean canManageForms(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_DOCUMENTATION_FORMS_PRIVILEGE); + } + + public static boolean canManageFeatures(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_FEATURES_PRIVILEGE); } public static boolean isAuthorized( @@ -194,13 +408,19 @@ public static boolean isAuthorized( @Nonnull String actor, @Nonnull String resourceType, @Nonnull String resource, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); - return 
AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); + return AuthUtil.isAuthorized(authorizer, actor, privilegeGroup, resourceSpec); } - private AuthorizationUtils() { } + public static boolean isViewDatasetUsageAuthorized( + final QueryContext context, final Urn resourceUrn) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE, + new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())); + } + private AuthorizationUtils() {} } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/concurrency/GraphQLConcurrencyUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/concurrency/GraphQLConcurrencyUtils.java new file mode 100644 index 00000000000000..25ed4face564dc --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/concurrency/GraphQLConcurrencyUtils.java @@ -0,0 +1,35 @@ +package com.linkedin.datahub.graphql.concurrency; + +import com.codahale.metrics.MetricRegistry; +import com.linkedin.metadata.utils.metrics.MetricUtils; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.function.Supplier; + +public class GraphQLConcurrencyUtils { + private GraphQLConcurrencyUtils() {} + + private static ExecutorService graphQLExecutorService = null; + + public static ExecutorService getExecutorService() { + return GraphQLConcurrencyUtils.graphQLExecutorService; + } + + public static void setExecutorService(ExecutorService executorService) { + GraphQLConcurrencyUtils.graphQLExecutorService = executorService; + } + + public static CompletableFuture supplyAsync( + Supplier supplier, String caller, String task) { + MetricUtils.counter( + MetricRegistry.name( + GraphQLConcurrencyUtils.class.getSimpleName(), "supplyAsync", caller, task)) + .inc(); + if (GraphQLConcurrencyUtils.graphQLExecutorService == null) { + return 
CompletableFuture.supplyAsync(supplier); + } else { + return CompletableFuture.supplyAsync( + supplier, GraphQLConcurrencyUtils.graphQLExecutorService); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/concurrency/GraphQLWorkerPoolThreadFactory.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/concurrency/GraphQLWorkerPoolThreadFactory.java new file mode 100644 index 00000000000000..bae492b85920b3 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/concurrency/GraphQLWorkerPoolThreadFactory.java @@ -0,0 +1,29 @@ +package com.linkedin.datahub.graphql.concurrency; + +import java.util.concurrent.ThreadFactory; +import java.util.concurrent.atomic.AtomicLong; + +public class GraphQLWorkerPoolThreadFactory implements ThreadFactory { + + private static final AtomicLong THREAD_INIT_NUMBER = new AtomicLong(); + public static final String GRAPHQL_THREAD_POOL_GROUP_NAME = "graphQLThreadGroup"; + public static final ThreadGroup GRAPHQL_THREAD_POOL_GROUP = + new ThreadGroup(GRAPHQL_THREAD_POOL_GROUP_NAME); + + private static long nextThreadNum() { + return THREAD_INIT_NUMBER.getAndIncrement(); + } + + private long stackSize; + + public GraphQLWorkerPoolThreadFactory(long stackSize) { + this.stackSize = stackSize; + } + + @Override + public final Thread newThread(Runnable runnable) { + + return new Thread( + GRAPHQL_THREAD_POOL_GROUP, runnable, "GraphQLWorkerThread-" + nextThreadNum(), stackSize); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java index a09dc8741cd29c..69e0ed0625b2fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java @@ -2,16 
+2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. */ public class AuthenticationException extends GraphQLException { - public AuthenticationException(String message) { - super(message); - } + public AuthenticationException(String message) { + super(message); + } - public AuthenticationException(String message, Throwable cause) { - super(message, cause); - } + public AuthenticationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java index 803af09e079d1a..30568e45938c15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.exception; - -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. 
*/ public class AuthorizationException extends DataHubGraphQLException { public AuthorizationException(String message) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 8d3f5d5cea9eb3..48c57b5c641576 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -6,40 +6,57 @@ import graphql.execution.DataFetcherExceptionHandlerResult; import graphql.execution.ResultPath; import graphql.language.SourceLocation; +import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; @PublicApi @Slf4j public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { + private static final String DEFAULT_ERROR_MESSAGE = "An unknown error occurred."; + @Override - public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { + public CompletableFuture handleException( + DataFetcherExceptionHandlerParameters handlerParameters) { Throwable exception = handlerParameters.getException(); SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); - log.error("Failed to execute DataFetcher", exception); - DataHubGraphQLErrorCode errorCode = DataHubGraphQLErrorCode.SERVER_ERROR; - String message = "An unknown error occurred."; + String message = DEFAULT_ERROR_MESSAGE; - // note: make sure to access the true error message via `getCause()` - if (exception.getCause() instanceof IllegalArgumentException) { + IllegalArgumentException illException = + findFirstThrowableCauseOfClass(exception, IllegalArgumentException.class); + if (illException != null) { + 
log.error("Failed to execute", illException); errorCode = DataHubGraphQLErrorCode.BAD_REQUEST; - message = exception.getCause().getMessage(); + message = illException.getMessage(); } - if (exception instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception).errorCode(); - message = exception.getMessage(); + DataHubGraphQLException graphQLException = + findFirstThrowableCauseOfClass(exception, DataHubGraphQLException.class); + if (graphQLException != null) { + log.error("Failed to execute", graphQLException); + errorCode = graphQLException.errorCode(); + message = graphQLException.getMessage(); } - if (exception.getCause() instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception.getCause()).errorCode(); - message = exception.getCause().getMessage(); + if (illException == null && graphQLException == null) { + log.error("Failed to execute", exception); } - DataHubGraphQLError error = new DataHubGraphQLError(message, path, sourceLocation, errorCode); - return DataFetcherExceptionHandlerResult.newResult().error(error).build(); + return CompletableFuture.completedFuture( + DataFetcherExceptionHandlerResult.newResult().error(error).build()); + } + + T findFirstThrowableCauseOfClass(Throwable throwable, Class clazz) { + while (throwable != null) { + if (clazz.isInstance(throwable)) { + return (T) throwable; + } else { + throwable = throwable.getCause(); + } + } + return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java index 15c539a608cc05..f007a8b7c7adbe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.exception; +import 
static graphql.Assert.*; + import graphql.ErrorType; import graphql.GraphQLError; import graphql.GraphqlErrorHelper; @@ -11,9 +13,6 @@ import java.util.List; import java.util.Map; -import static graphql.Assert.*; - - @PublicApi public class DataHubGraphQLError implements GraphQLError { @@ -23,7 +22,11 @@ public class DataHubGraphQLError implements GraphQLError { private final List locations; private final Map extensions; - public DataHubGraphQLError(String message, ResultPath path, SourceLocation sourceLocation, DataHubGraphQLErrorCode errorCode) { + public DataHubGraphQLError( + String message, + ResultPath path, + SourceLocation sourceLocation, + DataHubGraphQLErrorCode errorCode) { this.path = assertNotNull(path).toList(); this.errorCode = assertNotNull(errorCode); this.locations = Collections.singletonList(sourceLocation); @@ -90,4 +93,3 @@ public int hashCode() { return GraphqlErrorHelper.hashCode(this); } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java index 3d3c54e2febb29..75096a8c4148e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java @@ -2,7 +2,6 @@ import graphql.GraphQLException; - public class DataHubGraphQLException extends GraphQLException { private final DataHubGraphQLErrorCode code; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java index 2ee9838af54287..87a1aebb02f2e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when an unexpected value is provided by the client. - */ +/** Exception thrown when an unexpected value is provided by the client. */ public class ValidationException extends GraphQLException { - public ValidationException(String message) { - super(message); - } + public ValidationException(String message) { + super(message); + } - public ValidationException(String message, Throwable cause) { - super(message, cause); - } + public ValidationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 4d6133f18df050..167515a13c4da2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.PreProcessHooks; import lombok.Data; - @Data public class FeatureFlags { private boolean showSimplifiedHomepageByDefault = false; @@ -13,8 +12,15 @@ public class FeatureFlags { private boolean readOnlyModeEnabled = false; private boolean showSearchFiltersV2 = false; private boolean showBrowseV2 = false; + private boolean platformBrowseV2 = false; private PreProcessHooks preProcessHooks; private boolean showAcrylInfo = false; + private boolean erModelRelationshipFeatureEnabled = false; private boolean showAccessManagement = false; private boolean nestedDomainsEnabled = false; + private boolean schemaFieldEntityFetchEnabled = false; + private boolean businessAttributeEntityEnabled = false; + private boolean dataContractsEnabled = false; + private boolean editableDatasetNameEnabled = false; 
+ private boolean showSeparateSiblings = false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/instrumentation/DataHubFieldComplexityCalculator.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/instrumentation/DataHubFieldComplexityCalculator.java new file mode 100644 index 00000000000000..300f04cdaa0909 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/instrumentation/DataHubFieldComplexityCalculator.java @@ -0,0 +1,74 @@ +package com.linkedin.datahub.graphql.instrumentation; + +import graphql.analysis.FieldComplexityCalculator; +import graphql.analysis.FieldComplexityEnvironment; +import graphql.language.Field; +import graphql.language.FragmentSpread; +import graphql.language.Selection; +import graphql.language.SelectionSet; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class DataHubFieldComplexityCalculator implements FieldComplexityCalculator { + + private static final String COUNT_ARG = "count"; + private static final String INPUT_ARG = "input"; + private static final String SEARCH_RESULTS_FIELD = "searchResults"; + private static final String ENTITY_FIELD = "entity"; + private static final String SEARCH_RESULT_FIELDS_FIELD = "searchResultFields"; + private static final String GRAPHQL_QUERY_TYPE = "Query"; + + @SuppressWarnings("rawtypes") + @Override + public int calculate(FieldComplexityEnvironment environment, int childComplexity) { + int complexity = 1; + Map args = environment.getArguments(); + if (args.containsKey(INPUT_ARG)) { + Map input = (Map) args.get(INPUT_ARG); + if (input.containsKey(COUNT_ARG) && (Integer) input.get(COUNT_ARG) > 1) { + Integer count = (Integer) input.get(COUNT_ARG); + Field field = environment.getField(); + complexity += countRecursiveLineageComplexity(count, field); + } + } + if (GRAPHQL_QUERY_TYPE.equals(environment.getParentType().getName())) { + log.info( + 
"Query complexity for query: {} is {}", + environment.getField().getName(), + complexity + childComplexity); + } + return complexity + childComplexity; + } + + private int countRecursiveLineageComplexity(Integer count, Field field) { + List subFields = field.getSelectionSet().getSelections(); + Optional searchResultsFieldsField = + subFields.stream() + .filter(selection -> selection instanceof Field) + .map(selection -> (Field) selection) + .filter(subField -> SEARCH_RESULTS_FIELD.equals(subField.getName())) + .map(Field::getSelectionSet) + .map(SelectionSet::getSelections) + .flatMap(List::stream) + .filter(selection -> selection instanceof Field) + .map(selection -> (Field) selection) + .filter(subField -> ENTITY_FIELD.equals(subField.getName())) + .map(Field::getSelectionSet) + .map(SelectionSet::getSelections) + .flatMap(List::stream) + .filter(selection -> selection instanceof FragmentSpread) + .map(selection -> (FragmentSpread) selection) + .filter(subField -> SEARCH_RESULT_FIELDS_FIELD.equals(subField.getName())) + .findFirst(); + if (searchResultsFieldsField.isPresent()) { + // This fragment includes 2 lineage queries, we account for this additional complexity by + // multiplying + // by the count of entities attempting to be returned + return 2 * count; + } + return 0; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java deleted file mode 100644 index e228cb8445c02f..00000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.linkedin.datahub.graphql.resolvers; - -import com.google.common.collect.ImmutableList; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.linkedin.metadata.authorization.PoliciesConfig; -import java.util.List; -import java.util.Optional; -import 
com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; - -public class AuthUtils { - - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - public static boolean isAuthorized( - String principal, - List privilegeGroup, - Authorizer authorizer) { - for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = new AuthorizationRequest(principal, privilege, Optional.empty()); - final AuthorizationResult result = authorizer.authorize(request); - if (AuthorizationResult.Type.DENY.equals(result.getType())) { - return false; - } - } - return true; - } - - - private AuthUtils() { } -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java index 2520b55c24e253..570ea322be7a53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java @@ -2,29 +2,28 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthenticationException; - import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - /** - * Checks whether the user is currently authenticated & if so delegates execution to a child resolver. + * Checks whether the user is currently authenticated & if so delegates execution to a child + * resolver. 
*/ @Deprecated public final class AuthenticatedResolver implements DataFetcher { - private final DataFetcher _resolver; + private final DataFetcher _resolver; - public AuthenticatedResolver(final DataFetcher resolver) { - _resolver = resolver; - } + public AuthenticatedResolver(final DataFetcher resolver) { + _resolver = resolver; + } - @Override - public final T get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); - if (context.isAuthenticated()) { - return _resolver.get(environment); - } - throw new AuthenticationException("Failed to authenticate the current user."); + @Override + public final T get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + if (context.isAuthenticated()) { + return _resolver.get(environment); } + throw new AuthenticationException("Failed to authenticate the current user."); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java index 930c98ee7113ad..3126f25546f658 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java @@ -2,18 +2,17 @@ import com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; -import org.dataloader.DataLoader; -import org.dataloader.DataLoaderRegistry; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; public class BatchLoadUtils { - private BatchLoadUtils() { } + private BatchLoadUtils() {} public static CompletableFuture> batchLoadEntitiesOfSameType( List entities, 
@@ -24,12 +23,14 @@ public static CompletableFuture> batchLoadEntitiesOfSameType( } // Assume all entities are of the same type final com.linkedin.datahub.graphql.types.EntityType filteredEntity = - Iterables.getOnlyElement(entityTypes.stream() - .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) - .collect(Collectors.toList())); + Iterables.getOnlyElement( + entityTypes.stream() + .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) + .collect(Collectors.toList())); - final DataLoader loader = dataLoaderRegistry.getDataLoader(filteredEntity.name()); - List keyList = new ArrayList(); + final DataLoader loader = + dataLoaderRegistry.getDataLoader(filteredEntity.name()); + List keyList = new ArrayList(); for (Entity entity : entities) { keyList.add(filteredEntity.getKeyProvider().apply(entity)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java deleted file mode 100644 index b0f23e63177e60..00000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ /dev/null @@ -1,67 +0,0 @@ -package com.linkedin.datahub.graphql.resolvers; - -import com.google.common.collect.ImmutableMap; -import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.metadata.Constants; -import java.util.Map; -import java.util.stream.Collectors; -import javax.annotation.Nonnull; - - -/** - * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service Storage Entities - */ -public class EntityTypeMapper { - - static final Map ENTITY_TYPE_TO_NAME = - ImmutableMap.builder() - .put(EntityType.DATASET, "dataset") - .put(EntityType.ROLE, "role") - .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) - .put(EntityType.CORP_USER, "corpuser") - .put(EntityType.CORP_GROUP, 
"corpGroup") - .put(EntityType.DATA_PLATFORM, "dataPlatform") - .put(EntityType.DASHBOARD, "dashboard") - .put(EntityType.CHART, "chart") - .put(EntityType.TAG, "tag") - .put(EntityType.DATA_FLOW, "dataFlow") - .put(EntityType.DATA_JOB, "dataJob") - .put(EntityType.DATA_PROCESS_INSTANCE, Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME) - .put(EntityType.GLOSSARY_TERM, "glossaryTerm") - .put(EntityType.GLOSSARY_NODE, "glossaryNode") - .put(EntityType.MLMODEL, "mlModel") - .put(EntityType.MLMODEL_GROUP, "mlModelGroup") - .put(EntityType.MLFEATURE_TABLE, "mlFeatureTable") - .put(EntityType.MLFEATURE, "mlFeature") - .put(EntityType.MLPRIMARY_KEY, "mlPrimaryKey") - .put(EntityType.CONTAINER, "container") - .put(EntityType.DOMAIN, "domain") - .put(EntityType.NOTEBOOK, "notebook") - .put(EntityType.DATA_PLATFORM_INSTANCE, "dataPlatformInstance") - .put(EntityType.TEST, "test") - .put(EntityType.DATAHUB_VIEW, Constants.DATAHUB_VIEW_ENTITY_NAME) - .put(EntityType.DATA_PRODUCT, Constants.DATA_PRODUCT_ENTITY_NAME) - .build(); - - private static final Map ENTITY_NAME_TO_TYPE = - ENTITY_TYPE_TO_NAME.entrySet().stream().collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); - - private EntityTypeMapper() { - } - - public static EntityType getType(String name) { - String lowercaseName = name.toLowerCase(); - if (!ENTITY_NAME_TO_TYPE.containsKey(lowercaseName)) { - throw new IllegalArgumentException("Unknown entity name: " + name); - } - return ENTITY_NAME_TO_TYPE.get(lowercaseName); - } - - @Nonnull - public static String getName(EntityType type) { - if (!ENTITY_TYPE_TO_NAME.containsKey(type)) { - throw new IllegalArgumentException("Unknown entity type: " + type); - } - return ENTITY_TYPE_TO_NAME.get(type); - } -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index 02921b453e3154..1f4ebbb88bf1a6 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,15 +1,22 @@ package com.linkedin.datahub.graphql.resolvers; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; +import static com.datahub.authorization.AuthUtil.isAuthorized; +import static com.datahub.authorization.AuthUtil.isAuthorizedEntityType; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.ApiGroup.ANALYTICS; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; +import static com.linkedin.metadata.authorization.ApiOperation.READ; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.AuthenticatedUser; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.PlatformPrivileges; +import com.linkedin.datahub.graphql.resolvers.businessattribute.BusinessAttributeAuthorizationUtils; import com.linkedin.datahub.graphql.types.corpuser.mappers.CorpUserMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -19,21 +26,16 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Collections; -import java.util.Optional; +import java.util.List; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static 
com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * GraphQL resolver responsible for resolving information about the currently - * logged in User, including - * - * 1. User profile information - * 2. User privilege information, i.e. which features to display in the UI. + * GraphQL resolver responsible for resolving information about the currently logged in User, + * including * + *

1. User profile information 2. User privilege information, i.e. which features to display in + * the UI. */ public class MeResolver implements DataFetcher> { @@ -48,115 +50,125 @@ public MeResolver(final EntityClient entityClient, final FeatureFlags featureFla @Override public CompletableFuture get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - // 1. Get currently logged in user profile. - final Urn userUrn = Urn.createFromString(context.getActorUrn()); - final EntityResponse gmsUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, - Collections.singleton(userUrn), null, context.getAuthentication()).get(userUrn); - final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); - - // 2. Get platform privileges - final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); - platformPrivileges.setViewAnalytics(canViewAnalytics(context)); - platformPrivileges.setManagePolicies(canManagePolicies(context)); - platformPrivileges.setManageIdentities(canManageUsersGroups(context)); - platformPrivileges.setGeneratePersonalAccessTokens(canGeneratePersonalAccessToken(context)); - platformPrivileges.setManageDomains(canManageDomains(context)); - platformPrivileges.setManageIngestion(canManageIngestion(context)); - platformPrivileges.setManageSecrets(canManageSecrets(context)); - platformPrivileges.setManageTokens(canManageTokens(context)); - platformPrivileges.setManageTests(canManageTests(context)); - platformPrivileges.setManageGlossaries(canManageGlossaries(context)); - platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); - platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); - platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); - platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); - 
platformPrivileges.setManageGlobalViews(AuthorizationUtils.canManageGlobalViews(context)); - platformPrivileges.setManageOwnershipTypes(AuthorizationUtils.canManageOwnershipTypes(context)); - platformPrivileges.setManageGlobalAnnouncements(AuthorizationUtils.canManageGlobalAnnouncements(context)); - - // Construct and return authenticated user object. - final AuthenticatedUser authUser = new AuthenticatedUser(); - authUser.setCorpUser(corpUser); - authUser.setPlatformPrivileges(platformPrivileges); - return authUser; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to fetch authenticated user!", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // 1. Get currently logged in user profile. + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + final EntityResponse gmsUser = + _entityClient + .batchGetV2( + context.getOperationContext(), + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + null) + .get(userUrn); + final CorpUser corpUser = CorpUserMapper.map(context, gmsUser, _featureFlags); + + // 2. 
Get platform privileges + final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); + platformPrivileges.setViewAnalytics(canViewAnalytics(context)); + platformPrivileges.setManagePolicies(canManagePolicies(context)); + platformPrivileges.setManageIdentities(canManageUsersGroups(context)); + platformPrivileges.setGeneratePersonalAccessTokens( + canGeneratePersonalAccessToken(context)); + platformPrivileges.setManageDomains(canManageDomains(context)); + platformPrivileges.setManageIngestion(canManageIngestion(context)); + platformPrivileges.setManageSecrets(canManageSecrets(context)); + platformPrivileges.setManageTokens(canManageTokens(context)); + platformPrivileges.setViewTests(canViewTests(context)); + platformPrivileges.setManageTests(canManageTests(context)); + platformPrivileges.setManageGlossaries(canManageGlossaries(context)); + platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); + platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); + platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); + platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); + platformPrivileges.setManageGlobalViews( + AuthorizationUtils.canManageGlobalViews(context)); + platformPrivileges.setManageOwnershipTypes( + AuthorizationUtils.canManageOwnershipTypes(context)); + platformPrivileges.setManageGlobalAnnouncements( + AuthorizationUtils.canManageGlobalAnnouncements(context)); + platformPrivileges.setCreateBusinessAttributes( + BusinessAttributeAuthorizationUtils.canCreateBusinessAttribute(context)); + platformPrivileges.setManageBusinessAttributes( + BusinessAttributeAuthorizationUtils.canManageBusinessAttribute(context)); + // Construct and return authenticated user object. 
+ final AuthenticatedUser authUser = new AuthenticatedUser(); + authUser.setCorpUser(corpUser); + authUser.setPlatformPrivileges(platformPrivileges); + return authUser; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to fetch authenticated user!", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - /** - * Returns true if the authenticated user has privileges to view analytics. - */ + /** Returns true if the authenticated user has privileges to view analytics. */ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized(context.getActorUrn(), context.getAuthorizer(), ANALYTICS, READ); } - /** - * Returns true if the authenticated user has privileges to manage policies analytics. - */ + /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } - /** - * Returns true if the authenticated user has privileges to manage users & groups. - */ + /** Returns true if the authenticated user has privileges to manage users & groups. 
*/ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(CORP_USER_ENTITY_NAME, CORP_GROUP_ENTITY_NAME)); } - /** - * Returns true if the authenticated user has privileges to generate personal access tokens - */ + /** Returns true if the authenticated user has privileges to generate personal access tokens */ private boolean canGeneratePersonalAccessToken(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + } + + /** Returns true if the authenticated user has privileges to view tests. */ + private boolean canViewTests(final QueryContext context) { + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_TESTS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage (add or remove) tests. - */ + /** Returns true if the authenticated user has privileges to manage (add or remove) tests. 
*/ private boolean canManageTests(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage domains - */ + /** Returns true if the authenticated user has privileges to manage domains */ private boolean canManageDomains(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage access tokens - */ + /** Returns true if the authenticated user has privileges to manage access tokens */ private boolean canManageTokens(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); } - /** - * Returns true if the authenticated user has privileges to manage glossaries - */ + /** Returns true if the authenticated user has privileges to manage glossaries */ private boolean canManageGlossaries(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage user credentials - */ + /** Returns true if the authenticated user has privileges to manage user credentials */ private boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context.getAuthorizer(), 
context.getActorUrn(), + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - - /** - * Returns true if the provided actor is authorized for a particular privilege, false otherwise. - */ - private boolean isAuthorized(final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); - final AuthorizationResult result = authorizer.authorize(request); - return AuthorizationResult.Type.ALLOW.equals(result.getType()); - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java index 2c2e71ee92eaa4..3617eb47259797 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java @@ -1,15 +1,21 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.FacetFilterInput; +import com.linkedin.datahub.graphql.resolvers.search.SearchUtils; +import com.linkedin.metadata.aspect.AspectRetriever; import 
com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -18,7 +24,10 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.utils.ESUtils; import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.metadata.service.ViewService; +import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -30,184 +39,235 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.metadata.Constants.*; +public class ResolverUtils { + private static final Set KEYWORD_EXCLUDED_FILTERS = + ImmutableSet.of("runId", "_entityType"); + private static final ObjectMapper MAPPER = new ObjectMapper(); -public class ResolverUtils { + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } - private static final Set KEYWORD_EXCLUDED_FILTERS = ImmutableSet.of( - "runId", - "_entityType" - ); - private static final ObjectMapper MAPPER = new ObjectMapper(); - static { - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); - } + private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); - private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); + private ResolverUtils() {} - private ResolverUtils() { } + @Nonnull + public static T bindArgument(Object argument, Class 
clazz) { + return MAPPER.convertValue(argument, clazz); + } - @Nonnull - public static T bindArgument(Object argument, Class clazz) { - return MAPPER.convertValue(argument, clazz); + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } + return input; + } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; - } + @Nonnull + public static Authentication getAuthentication(DataFetchingEnvironment environment) { + return ((QueryContext) environment.getContext()).getAuthentication(); + } - @Nonnull - public static Authentication getAuthentication(DataFetchingEnvironment environment) { - return ((QueryContext) environment.getContext()).getAuthentication(); + /** + * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure + * that it is matched against a keyword filter in ElasticSearch. + * @param facetFilterInputs The list of facet filters inputs + * @param validFacetFields The set of valid fields against which to filter for. + * @return A map of filter definitions to be used in ElasticSearch. 
+ */ + @Nonnull + public static Map buildFacetFilters( + @Nullable List facetFilterInputs, @Nonnull Set validFacetFields) { + if (facetFilterInputs == null) { + return Collections.emptyMap(); } - /** - * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure - * that it is matched against a keyword filter in ElasticSearch. - * - * @param facetFilterInputs The list of facet filters inputs - * @param validFacetFields The set of valid fields against which to filter for. - * @return A map of filter definitions to be used in ElasticSearch. - */ - @Nonnull - public static Map buildFacetFilters(@Nullable List facetFilterInputs, - @Nonnull Set validFacetFields) { - if (facetFilterInputs == null) { - return Collections.emptyMap(); - } - - final Map facetFilters = new HashMap<>(); - - facetFilterInputs.forEach(facetFilterInput -> { - if (!validFacetFields.contains(facetFilterInput.getField())) { - throw new ValidationException(String.format("Unrecognized facet with name %s provided", facetFilterInput.getField())); - } - if (!facetFilterInput.getValues().isEmpty()) { - facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); - } + final Map facetFilters = new HashMap<>(); + + facetFilterInputs.forEach( + facetFilterInput -> { + if (!validFacetFields.contains(facetFilterInput.getField())) { + throw new ValidationException( + String.format( + "Unrecognized facet with name %s provided", facetFilterInput.getField())); + } + if (!facetFilterInput.getValues().isEmpty()) { + facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); + } }); - return facetFilters; - } + return facetFilters; + } - public static List criterionListFromAndFilter(List andFilters) { - return andFilters != null && !andFilters.isEmpty() - ? 
andFilters.stream() - .map(filter -> criterionFromFilter(filter)) - .collect(Collectors.toList()) : Collections.emptyList(); + public static List criterionListFromAndFilter( + List andFilters, @Nullable AspectRetriever aspectRetriever) { + return andFilters != null && !andFilters.isEmpty() + ? andFilters.stream() + .map(filter -> criterionFromFilter(filter, aspectRetriever)) + .collect(Collectors.toList()) + : Collections.emptyList(); + } + + // In the case that user sends filters to be or-d together, we need to build a series of + // conjunctive criterion + // arrays, rather than just one for the AND case. + public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( + @Nonnull List orFilters, @Nullable AspectRetriever aspectRetriever) { + return new ConjunctiveCriterionArray( + orFilters.stream() + .map( + orFilter -> { + CriterionArray andCriterionForOr = + new CriterionArray( + criterionListFromAndFilter(orFilter.getAnd(), aspectRetriever)); + return new ConjunctiveCriterion().setAnd(andCriterionForOr); + }) + .collect(Collectors.toList())); + } + + @Nullable + public static Filter buildFilter( + @Nullable List andFilters, + @Nullable List orFilters, + @Nullable AspectRetriever aspectRetriever) { + if ((andFilters == null || andFilters.isEmpty()) + && (orFilters == null || orFilters.isEmpty())) { + return null; + } + // Or filters are the new default. We will check them first. + // If we have OR filters, we need to build a series of CriterionArrays + if (orFilters != null && !orFilters.isEmpty()) { + return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters, aspectRetriever)); } - // In the case that user sends filters to be or-d together, we need to build a series of conjunctive criterion - // arrays, rather than just one for the AND case. 
- public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( - @Nonnull List orFilters - ) { - return new ConjunctiveCriterionArray(orFilters.stream().map(orFilter -> { - CriterionArray andCriterionForOr = new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); - return new ConjunctiveCriterion().setAnd( - andCriterionForOr - ); - } - ).collect(Collectors.toList())); + // If or filters are not set, someone may be using the legacy and filters + final List andCriterions = criterionListFromAndFilter(andFilters, aspectRetriever); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + } + + public static Criterion criterionFromFilter( + final FacetFilterInput filter, @Nullable AspectRetriever aspectRetriever) { + return criterionFromFilter(filter, false, aspectRetriever); + } + + // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) + public static Criterion criterionFromFilter( + final FacetFilterInput filter, + final Boolean skipKeywordSuffix, + @Nullable AspectRetriever aspectRetriever) { + Criterion result = new Criterion(); + + if (skipKeywordSuffix) { + result.setField(filter.getField()); + } else { + result.setField(getFilterField(filter.getField(), skipKeywordSuffix, aspectRetriever)); } - @Nullable - public static Filter buildFilter(@Nullable List andFilters, @Nullable List orFilters) { - if ((andFilters == null || andFilters.isEmpty()) && (orFilters == null || orFilters.isEmpty())) { - return null; - } - - // Or filters are the new default. We will check them first. 
- // If we have OR filters, we need to build a series of CriterionArrays - if (orFilters != null && !orFilters.isEmpty()) { - return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); - } - - // If or filters are not set, someone may be using the legacy and filters - final List andCriterions = criterionListFromAndFilter(andFilters); - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + // `value` is deprecated in place of `values`- this is to support old query patterns. If values + // is provided, + // this statement will be skipped + if (filter.getValues() == null && filter.getValue() != null) { + result.setValues(new StringArray(filter.getValue())); + result.setValue(filter.getValue()); + } else if (filter.getValues() != null) { + result.setValues(new StringArray(filter.getValues())); + if (!filter.getValues().isEmpty()) { + result.setValue(filter.getValues().get(0)); + } else { + result.setValue(""); + } + } else { + result.setValues(new StringArray()); + result.setValue(""); } - public static Criterion criterionFromFilter(final FacetFilterInput filter) { - return criterionFromFilter(filter, false); + if (filter.getCondition() != null) { + result.setCondition(Condition.valueOf(filter.getCondition().toString())); + } else { + result.setCondition(Condition.EQUAL); } - // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) - public static Criterion criterionFromFilter(final FacetFilterInput filter, final Boolean skipKeywordSuffix) { - Criterion result = new Criterion(); - - if (skipKeywordSuffix) { - result.setField(filter.getField()); - } else { - result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); - } - - // `value` is deprecated in place of `values`- this is to support old query patterns. 
If values is provided, - // this statement will be skipped - if (filter.getValues() == null && filter.getValue() != null) { - result.setValues(new StringArray(filter.getValue())); - result.setValue(filter.getValue()); - } else if (filter.getValues() != null) { - result.setValues(new StringArray(filter.getValues())); - if (!filter.getValues().isEmpty()) { - result.setValue(filter.getValues().get(0)); - } else { - result.setValue(""); - } - } else { - result.setValues(new StringArray()); - result.setValue(""); - } - - - if (filter.getCondition() != null) { - result.setCondition(Condition.valueOf(filter.getCondition().toString())); - } else { - result.setCondition(Condition.EQUAL); - } - - if (filter.getNegated() != null) { - result.setNegated(filter.getNegated()); - } - - return result; + if (filter.getNegated() != null) { + result.setNegated(filter.getNegated()); } - private static String getFilterField(final String originalField, final boolean skipKeywordSuffix) { - if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { - return originalField; - } - return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + return result; + } + + private static String getFilterField( + final String originalField, + final boolean skipKeywordSuffix, + @Nullable AspectRetriever aspectRetriever) { + if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { + return originalField; } + return ESUtils.toKeywordField(originalField, skipKeywordSuffix, aspectRetriever); + } - public static Filter buildFilterWithUrns(@Nonnull Set urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") + public static Filter buildFilterWithUrns(@Nonnull Set urns, @Nullable Filter inputFilters) { + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); - if (inputFilters == null) { - return QueryUtils.newFilter(urnMatchCriterion); - } - 
- // Add urn match criterion to each or clause - if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { - for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { - conjunctiveCriterion.getAnd().add(urnMatchCriterion); - } - return inputFilters; - } - return QueryUtils.newFilter(urnMatchCriterion); + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + if (inputFilters == null) { + return QueryUtils.newFilter(urnMatchCriterion); + } + + // Add urn match criterion to each or clause + if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { + for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { + conjunctiveCriterion.getAnd().add(urnMatchCriterion); + } + return inputFilters; + } + return QueryUtils.newFilter(urnMatchCriterion); + } + + public static Filter viewFilter( + OperationContext opContext, ViewService viewService, String viewUrn) { + if (viewUrn == null) { + return null; + } + DataHubViewInfo viewInfo = resolveView(opContext, viewService, UrnUtils.getUrn(viewUrn)); + if (viewInfo == null) { + return null; + } + Filter result = SearchUtils.combineFilters(null, viewInfo.getDefinition().getFilter()); + return result; + } + + /** + * Simply resolves the end time filter for the search across lineage query. If the start time is + * provided, but end time is not provided, we will default to the current time. 
+ */ + public static Long getLineageEndTimeMillis( + @Nullable Long startTimeMillis, @Nullable Long endTimeMillis) { + if (endTimeMillis != null) { + return endTimeMillis; + } + if (startTimeMillis != null) { + return System.currentTimeMillis(); } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index b5b13cc00b40d0..0e9d2cea611416 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.AssertionResultType; import com.linkedin.datahub.graphql.generated.AssertionRunEvent; @@ -12,6 +15,7 @@ import com.linkedin.datahub.graphql.types.dataset.mappers.AssertionRunEventMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -26,13 +30,9 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * GraphQL Resolver used for fetching AssertionRunEvents. 
- */ -public class AssertionRunEventResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching AssertionRunEvents. */ +public class AssertionRunEventResolver + implements DataFetcher> { private final EntityClient _client; @@ -42,58 +42,91 @@ public AssertionRunEventResolver(final EntityClient client) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String urn = ((Assertion) environment.getSource()).getUrn(); - final String maybeStatus = environment.getArgumentOrDefault("status", null); - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; + final String urn = ((Assertion) environment.getSource()).getUrn(); + final String maybeStatus = environment.getArgumentOrDefault("status", null); + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; - try { - // Step 1: Fetch aspects from GMS - List aspects = _client.getTimeseriesAspectValues( - urn, - Constants.ASSERTION_ENTITY_NAME, - Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - buildFilter(maybeFilters, maybeStatus), - context.getAuthentication()); + try { + // Step 1: Fetch aspects from GMS + List aspects = + _client.getTimeseriesAspectValues( + context.getOperationContext(), + urn, + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilter( + maybeFilters, + maybeStatus, + context.getOperationContext().getAspectRetriever())); - // Step 2: Bind profiles into GraphQL strong types. - List runEvents = aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + // Step 2: Bind profiles into GraphQL strong types. + List runEvents = + aspects.stream() + .map(a -> AssertionRunEventMapper.map(context, a)) + .collect(Collectors.toList()); - // Step 3: Package and return response. 
- final AssertionRunEventsResult result = new AssertionRunEventsResult(); - result.setTotal(runEvents.size()); - result.setFailed(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.FAILURE.equals( - runEvent.getResult().getType() - )).count())); - result.setSucceeded(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.SUCCESS.equals(runEvent.getResult().getType() - )).count())); - result.setRunEvents(runEvents); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 3: Package and return response. + final AssertionRunEventsResult result = new AssertionRunEventsResult(); + result.setTotal(runEvents.size()); + result.setFailed( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.FAILURE.equals( + runEvent.getResult().getType())) + .count())); + result.setSucceeded( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.SUCCESS.equals( + runEvent.getResult().getType())) + .count())); + result.setErrored( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.ERROR.equals( + runEvent.getResult().getType())) + .count())); + result.setRunEvents(runEvents); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }, + this.getClass().getSimpleName(), + 
"get"); } @Nullable - public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable final String status) { + public static Filter buildFilter( + @Nullable FilterInput filtersInput, + @Nullable final String status, + @Nullable AspectRetriever aspectRetriever) { if (filtersInput == null && status == null) { return null; } @@ -107,8 +140,14 @@ public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable f if (filtersInput != null) { facetFilters.addAll(filtersInput.getAnd()); } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilters.stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + facetFilters.stream() + .map(filter -> criterionFromFilter(filter, true, aspectRetriever)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionUtils.java new file mode 100644 index 00000000000000..757ff38de60065 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionUtils.java @@ -0,0 +1,27 @@ +package com.linkedin.datahub.graphql.resolvers.assertion; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.metadata.authorization.PoliciesConfig; + +public class AssertionUtils { + public static boolean isAuthorizedToEditAssertionFromAssertee( + final QueryContext context, 
final Urn asserteeUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthorizationUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + asserteeUrn.getEntityType(), + asserteeUrn.toString(), + orPrivilegeGroups); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 8006ae7d2a464c..1cf233221d4d33 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,14 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.authorization.PoliciesConfig; @@ -19,63 +21,79 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver that deletes an Assertion. - */ +/** GraphQL Resolver that deletes an Assertion. */ @Slf4j -public class DeleteAssertionResolver implements DataFetcher> { +public class DeleteAssertionResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; - public DeleteAssertionResolver(final EntityClient entityClient, final EntityService entityService) { + public DeleteAssertionResolver( + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn assertionUrn = Urn.createFromString(environment.getArgument("urn")); - return CompletableFuture.supplyAsync(() -> { - - // 1. check the entity exists. If not, return false. - if (!_entityService.exists(assertionUrn)) { - return true; - } - - if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { - try { - _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(assertionUrn, context.getAuthentication()); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for assertion with urn %s", assertionUrn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + // 1. check the entity exists. If not, return false. 
+ if (!_entityService.exists(context.getOperationContext(), assertionUrn, true)) { return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against assertion with urn %s", assertionUrn), e); } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + + if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { + try { + _entityClient.deleteEntity(context.getOperationContext(), assertionUrn); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + context.getOperationContext(), assertionUrn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for assertion with urn %s", + assertionUrn), + e); + } + }); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against assertion with urn %s", assertionUrn), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } - /** - * Determine whether the current user is allowed to remove an assertion. - */ - private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final Urn assertionUrn) { + /** Determine whether the current user is allowed to remove an assertion. */ + private boolean isAuthorizedToDeleteAssertion( + final QueryContext context, final Urn assertionUrn) { // 2. 
fetch the assertion info AssertionInfo info = - (AssertionInfo) EntityUtils.getAspectFromEntity( - assertionUrn.toString(), Constants.ASSERTION_INFO_ASPECT_NAME, _entityService, null); + (AssertionInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + assertionUrn.toString(), + Constants.ASSERTION_INFO_ASPECT_NAME, + _entityService, + null); if (info != null) { // 3. check whether the actor has permission to edit the assertions on the assertee @@ -86,11 +104,14 @@ private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final return true; } - private boolean isAuthorizedToDeleteAssertionFromAssertee(final QueryContext context, final Urn asserteeUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToDeleteAssertionFromAssertee( + final QueryContext context, final Urn asserteeUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -104,7 +125,8 @@ private Urn getAsserteeUrnFromInfo(final AssertionInfo info) { case DATASET: return info.getDatasetAssertion().getDataset(); default: - throw new RuntimeException(String.format("Unsupported Assertion Type %s provided", info.getType())); + throw new RuntimeException( + String.format("Unsupported Assertion Type %s provided", info.getType())); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index ff573bb59fba13..a56d1cdd2a02b0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -4,7 +4,9 @@ import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityAssertionsResult; @@ -25,12 +27,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver used for fetching the list of Assertions associated with an Entity. - */ -public class EntityAssertionsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. 
*/ +@Slf4j +public class EntityAssertionsResolver + implements DataFetcher> { private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; @@ -44,54 +46,78 @@ public EntityAssertionsResolver(final EntityClient entityClient, final GraphClie @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 200); + final Boolean includeSoftDeleted = + environment.getArgumentOrDefault("includeSoftDeleted", false); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 200); + try { + // Step 1: Fetch set of assertions associated with the target entity from the Graph + // Store + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + start, + count, + context.getActorUrn()); - try { - // Step 1: Fetch set of assertions associated with the target entity from the Graph Store - final EntityRelationships relationships = _graphClient.getRelatedEntities( - entityUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - start, - count, - context.getActorUrn() - ); + final List assertionUrns = + relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); - final List assertionUrns = relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + // 
Step 2: Hydrate the assertion entities based on the urns from step 1 + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + null); - // Step 2: Hydrate the assertion entities based on the urns from step 1 - final Map entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - null, - context.getAuthentication()); + // Step 3: Map GMS assertion model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List assertions = + gmsResults.stream() + .filter(Objects::nonNull) + .map(r -> AssertionMapper.map(context, r)) + .filter(assertion -> assertionExists(assertion, includeSoftDeleted, context)) + .collect(Collectors.toList()); - // Step 3: Map GMS assertion model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List assertions = gmsResults.stream() - .filter(Objects::nonNull) - .map(AssertionMapper::map) - .collect(Collectors.toList()); + // Step 4: Package and return result + final EntityAssertionsResult result = new EntityAssertionsResult(); + result.setCount(relationships.getCount()); + result.setStart(relationships.getStart()); + result.setTotal(relationships.getTotal()); + result.setAssertions(assertions); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } - // Step 4: Package and return result - final EntityAssertionsResult result = new EntityAssertionsResult(); - result.setCount(relationships.getCount()); - result.setStart(relationships.getStart()); - result.setTotal(relationships.getTotal()); - result.setAssertions(assertions); - return 
result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + private boolean assertionExists( + Assertion assertion, Boolean includeSoftDeleted, QueryContext context) { + try { + return _entityClient.exists( + context.getOperationContext(), UrnUtils.getUrn(assertion.getUrn()), includeSoftDeleted); + } catch (RemoteInvocationException e) { + log.error( + String.format("Unable to check if assertion %s exists, ignoring it", assertion.getUrn()), + e); + return false; + } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/ReportAssertionResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/ReportAssertionResultResolver.java new file mode 100644 index 00000000000000..b720aa11a8bdc7 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/ReportAssertionResultResolver.java @@ -0,0 +1,117 @@ +package com.linkedin.datahub.graphql.resolvers.assertion; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + +import com.linkedin.assertion.AssertionResult; +import com.linkedin.assertion.AssertionResultError; +import com.linkedin.assertion.AssertionResultErrorType; +import com.linkedin.assertion.AssertionResultType; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import com.linkedin.data.template.StringMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.AssertionResultInput; +import com.linkedin.datahub.graphql.generated.StringMapEntryInput; +import com.linkedin.metadata.service.AssertionService; +import graphql.execution.DataFetcherExceptionHandler; +import 
graphql.execution.DataFetcherResult; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class ReportAssertionResultResolver implements DataFetcher> { + + public static final String ERROR_MESSAGE_KEY = "message"; + private final AssertionService _assertionService; + + public ReportAssertionResultResolver(AssertionService assertionService) { + _assertionService = assertionService; + } + + /** + * This is called by the graphql engine to fetch the value. The {@link DataFetchingEnvironment} is + * a composite context object that tells you all you need to know about how to fetch a data value + * in graphql type terms. + * + * @param environment this is the data fetching environment which contains all the context you + * need to fetch a value + * @return a value of type T. May be wrapped in a {@link DataFetcherResult} + * @throws Exception to relieve the implementations from having to wrap checked exceptions. Any + * exception thrown from a {@code DataFetcher} will eventually be handled by the registered + * {@link DataFetcherExceptionHandler} and the related field will have a value of {@code null} + * in the result. + */ + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final Urn assertionUrn = UrnUtils.getUrn(environment.getArgument("urn")); + final AssertionResultInput input = + bindArgument(environment.getArgument("result"), AssertionResultInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final Urn asserteeUrn = + _assertionService.getEntityUrnForAssertion( + context.getOperationContext(), assertionUrn); + if (asserteeUrn == null) { + throw new RuntimeException( + String.format( + "Failed to report Assertion Run Event. 
Assertion with urn %s does not exist or is not associated with any entity.", + assertionUrn)); + } + + // Check whether the current user is allowed to update the assertion. + if (AssertionUtils.isAuthorizedToEditAssertionFromAssertee(context, asserteeUrn)) { + AssertionResult assertionResult = mapAssertionResult(input); + _assertionService.addAssertionRunEvent( + context.getOperationContext(), + assertionUrn, + asserteeUrn, + input.getTimestampMillis() != null + ? input.getTimestampMillis() + : System.currentTimeMillis(), + assertionResult); + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); + } + + private static StringMap mapContextParameters(List input) { + + if (input == null || input.isEmpty()) { + return null; + } + StringMap entries = new StringMap(); + input.forEach(entry -> entries.put(entry.getKey(), entry.getValue())); + return entries; + } + + private AssertionResult mapAssertionResult(AssertionResultInput input) { + AssertionResult assertionResult = new AssertionResult(); + assertionResult.setType(AssertionResultType.valueOf(input.getType().toString())); + assertionResult.setExternalUrl(input.getExternalUrl(), SetMode.IGNORE_NULL); + if (assertionResult.getType() == AssertionResultType.ERROR && input.getError() != null) { + assertionResult.setError(mapAssertionResultError(input)); + } + if (input.getProperties() != null) { + assertionResult.setNativeResults(mapContextParameters(input.getProperties())); + } + return assertionResult; + } + + private static AssertionResultError mapAssertionResultError(AssertionResultInput input) { + AssertionResultError error = new AssertionResultError(); + error.setType(AssertionResultErrorType.valueOf(input.getError().getType().toString())); + StringMap errorProperties = new StringMap(); + errorProperties.put(ERROR_MESSAGE_KEY, input.getError().getMessage()); + error.setProperties(errorProperties); + return error; + } +} diff 
--git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/UpsertCustomAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/UpsertCustomAssertionResolver.java new file mode 100644 index 00000000000000..026f486e32c116 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/UpsertCustomAssertionResolver.java @@ -0,0 +1,108 @@ +package com.linkedin.datahub.graphql.resolvers.assertion; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.assertion.CustomAssertionInfo; +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.Assertion; +import com.linkedin.datahub.graphql.generated.PlatformInput; +import com.linkedin.datahub.graphql.generated.UpsertCustomAssertionInput; +import com.linkedin.datahub.graphql.types.assertion.AssertionMapper; +import com.linkedin.metadata.key.DataPlatformKey; +import com.linkedin.metadata.service.AssertionService; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.SchemaFieldUtils; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class UpsertCustomAssertionResolver implements DataFetcher> { + + private final AssertionService _assertionService; + + public UpsertCustomAssertionResolver(@Nonnull final AssertionService assertionService) { + _assertionService = 
Objects.requireNonNull(assertionService, "assertionService is required"); + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final String maybeAssertionUrn = environment.getArgument("urn"); + final UpsertCustomAssertionInput input = + bindArgument(environment.getArgument("input"), UpsertCustomAssertionInput.class); + + final Urn entityUrn = UrnUtils.getUrn(input.getEntityUrn()); + final Urn assertionUrn; + + if (maybeAssertionUrn == null) { + assertionUrn = _assertionService.generateAssertionUrn(); + } else { + assertionUrn = UrnUtils.getUrn(maybeAssertionUrn); + } + + return CompletableFuture.supplyAsync( + () -> { + // Check whether the current user is allowed to update the assertion. + if (AssertionUtils.isAuthorizedToEditAssertionFromAssertee(context, entityUrn)) { + _assertionService.upsertCustomAssertion( + context.getOperationContext(), + assertionUrn, + entityUrn, + input.getDescription(), + input.getExternalUrl(), + mapAssertionPlatform(input.getPlatform()), + createCustomAssertionInfo(input, entityUrn)); + + return AssertionMapper.map( + context, + _assertionService.getAssertionEntityResponse( + context.getOperationContext(), assertionUrn)); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); + } + + @SneakyThrows + private DataPlatformInstance mapAssertionPlatform(PlatformInput platformInput) { + DataPlatformInstance platform = new DataPlatformInstance(); + if (platformInput.getUrn() != null) { + platform.setPlatform(Urn.createFromString(platformInput.getUrn())); + } else if (platformInput.getName() != null) { + platform.setPlatform( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(platformInput.getName()), + DATA_PLATFORM_ENTITY_NAME)); + } else { + throw new IllegalArgumentException( + "Failed to upsert Custom Assertion. 
Platform Name or Platform Urn must be specified."); + } + + return platform; + } + + private CustomAssertionInfo createCustomAssertionInfo( + UpsertCustomAssertionInput input, Urn entityUrn) { + CustomAssertionInfo customAssertionInfo = new CustomAssertionInfo(); + customAssertionInfo.setType(input.getType()); + customAssertionInfo.setEntity(entityUrn); + customAssertionInfo.setLogic(input.getLogic(), SetMode.IGNORE_NULL); + + if (input.getFieldPath() != null) { + customAssertionInfo.setField( + SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), input.getFieldPath())); + } + return customAssertionInfo; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java index 8f5be1000bb453..9015ad0ebb2102 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java @@ -5,13 +5,9 @@ import java.time.temporal.ChronoUnit; import java.util.Optional; - - public class AccessTokenUtil { - /** - * Convert an {@link AccessTokenDuration} into its milliseconds equivalent. - */ + /** Convert an {@link AccessTokenDuration} into its milliseconds equivalent. 
*/ public static Optional mapDurationToMs(final AccessTokenDuration duration) { switch (duration) { case ONE_HOUR: @@ -29,9 +25,10 @@ public static Optional mapDurationToMs(final AccessTokenDuration duration) case NO_EXPIRY: return Optional.empty(); default: - throw new RuntimeException(String.format("Unrecognized access token duration %s provided", duration)); + throw new RuntimeException( + String.format("Unrecognized access token duration %s provided", duration)); } } - private AccessTokenUtil() { } + private AccessTokenUtil() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java index cd55d81aec6ad1..e17e3cb6fb64a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatefulTokenService; @@ -8,12 +10,13 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AccessToken; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.AccessTokenType; import com.linkedin.datahub.graphql.generated.CreateAccessTokenInput; import com.linkedin.datahub.graphql.generated.EntityType; -import 
com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.metadata.Constants; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -22,12 +25,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for creating personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for creating personal & service principal v2-type (stateful) access tokens. */ @Slf4j public class CreateAccessTokenResolver implements DataFetcher> { @@ -38,62 +36,87 @@ public CreateAccessTokenResolver(final StatefulTokenService statefulTokenService } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final CreateAccessTokenInput input = bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); - - log.info("User {} requesting new access token for user {} ", context.getActorUrn(), input.getActorUrn()); - - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. 
- final String actorUrn = input.getActorUrn(); - final Date date = new Date(); - final long createdAtInMs = date.getTime(); - final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - - final String tokenName = input.getName(); - final String tokenDescription = input.getDescription(); - - final String accessToken = - _statefulTokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null), - createdAtInMs, tokenName, tokenDescription, context.getActorUrn()); - log.info("Generated access token for {} of type {} with duration {}", input.getActorUrn(), input.getType(), - input.getDuration()); - try { - final String tokenHash = _statefulTokenService.hash(accessToken); - - final AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - result.setMetadata(metadata); - - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new access token with name %s", input.getName()), - e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final CreateAccessTokenInput input = + bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); + + log.info( + "User {} requesting new access token for user {} ", + context.getActorUrn(), + input.getActorUrn()); + + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. + final String actorUrn = input.getActorUrn(); + final Date date = new Date(); + final long createdAtInMs = date.getTime(); + final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + + final String tokenName = input.getName(); + final String tokenDescription = input.getDescription(); + + final String accessToken = + _statefulTokenService.generateAccessToken( + type, + createActor(input.getType(), actorUrn), + expiresInMs.orElse(null), + createdAtInMs, + tokenName, + tokenDescription, + context.getActorUrn()); + log.info( + "Generated access token for {} of type {} with duration {}", + input.getActorUrn(), + input.getType(), + input.getDuration()); + try { + final String tokenHash = _statefulTokenService.hash(accessToken); + + final AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn( + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + result.setMetadata(metadata); + + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new access token with name %s", input.getName()), + e); + } + } + throw new 
AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final CreateAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final CreateAccessTokenInput input) { if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final CreateAccessTokenInput input) { + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final CreateAccessTokenInput input) { return AuthorizationUtils.canManageTokens(context) - || input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + || input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -101,6 +124,7 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java new file mode 100644 index 00000000000000..8372b6b5126a3e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/DebugAccessResolver.java @@ -0,0 +1,241 @@ +package com.linkedin.datahub.graphql.resolvers.auth; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.EntityRelationship; +import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.DebugAccessResult; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import 
com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.policy.DataHubPolicyInfo; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +public class DebugAccessResolver implements DataFetcher> { + + private static final String LAST_UPDATED_AT_FIELD = "lastUpdatedTimestamp"; + private final EntityClient _entityClient; + private final GraphClient _graphClient; + + public DebugAccessResolver(EntityClient entityClient, GraphClient graphClient) { + _entityClient = entityClient; + _graphClient = graphClient; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + + if (!AuthorizationUtils.canManageUsersAndGroups(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + final String userUrn = environment.getArgument("userUrn"); + + return populateDebugAccessResult(userUrn, context); + }, + this.getClass().getSimpleName(), + "get"); + } + + public DebugAccessResult populateDebugAccessResult(String userUrn, QueryContext context) { + + try { + final String actorUrn = context.getActorUrn(); + final DebugAccessResult result = new DebugAccessResult(); + final List types = + Arrays.asList("IsMemberOfRole", "IsMemberOfGroup", "IsMemberOfNativeGroup"); + EntityRelationships entityRelationships = getEntityRelationships(userUrn, types, actorUrn); + + List roles = + getUrnsFromEntityRelationships(entityRelationships, Constants.DATAHUB_ROLE_ENTITY_NAME); + + List groups = + getUrnsFromEntityRelationships(entityRelationships, Constants.CORP_GROUP_ENTITY_NAME); + List groupsWithRoles = new ArrayList<>(); + + Set rolesViaGroups = new HashSet<>(); + groups.forEach( + groupUrn -> { + EntityRelationships groupRelationships = + getEntityRelationships(groupUrn, List.of("IsMemberOfRole"), actorUrn); + List rolesOfGroup = + getUrnsFromEntityRelationships( + groupRelationships, Constants.DATAHUB_ROLE_ENTITY_NAME); + if (rolesOfGroup.isEmpty()) { + return; + } + groupsWithRoles.add(groupUrn); + rolesViaGroups.addAll(rolesOfGroup); + }); + Set allRoles = new HashSet<>(roles); + allRoles.addAll(rolesViaGroups); + + result.setRoles(roles); + result.setGroups(groups); + result.setGroupsWithRoles(groupsWithRoles); + result.setRolesViaGroups(new ArrayList<>(rolesViaGroups)); + result.setAllRoles(new ArrayList<>(allRoles)); + + Set policyUrns = getPoliciesFor(context, userUrn, groups, result.getAllRoles()); + + // List of Policy that apply to this user directly or indirectly. + result.setPolicies(policyUrns.stream().map(Urn::toString).collect(Collectors.toList())); + + // List of privileges that this user has directly or indirectly. 
+ result.setPrivileges(new ArrayList<>(getPrivileges(context, policyUrns))); + + return result; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private Set getPrivileges(final QueryContext context, Set policyUrns) { + try { + final Map policies = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.POLICY_ENTITY_NAME, + policyUrns, + ImmutableSet.of(Constants.DATAHUB_POLICY_INFO_ASPECT_NAME)); + + return policies.keySet().stream() + .filter(Objects::nonNull) + .filter(key -> policies.get(key) != null) + .filter(key -> policies.get(key).hasAspects()) + .map(key -> policies.get(key).getAspects()) + .filter(aspectMap -> aspectMap.containsKey(DATAHUB_POLICY_INFO_ASPECT_NAME)) + .map( + aspectMap -> + new DataHubPolicyInfo( + aspectMap.get(DATAHUB_POLICY_INFO_ASPECT_NAME).getValue().data())) + .map(DataHubPolicyInfo::getPrivileges) + .flatMap(List::stream) + .collect(Collectors.toSet()); + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve privileges from GMS", e); + } + } + + private List getUrnsFromEntityRelationships( + EntityRelationships entityRelationships, String entityName) { + return entityRelationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .filter(entity -> entityName.equals(entity.getEntityType())) + .map(Urn::toString) + .distinct() + .collect(Collectors.toList()); + } + + private EntityRelationships getEntityRelationships( + final String urn, final List types, final String actor) { + return _graphClient.getRelatedEntities( + urn, types, RelationshipDirection.OUTGOING, 0, 100, actor); + } + + private Set getPoliciesFor( + final QueryContext context, + final String user, + final List groups, + final List roles) + throws RemoteInvocationException { + final SortCriterion sortCriterion = + new SortCriterion().setField(LAST_UPDATED_AT_FIELD).setOrder(SortOrder.DESCENDING); + return _entityClient + .search( + 
context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.POLICY_ENTITY_NAME, + "", + buildFilterToGetPolicies(user, groups, roles), + Collections.singletonList(sortCriterion), + 0, + 10000) + .getEntities() + .stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); + } + + private Filter buildFilterToGetPolicies( + final String user, final List groups, final List roles) { + + // setOr(array(andArray(user), andArray(groups), andArray(roles), andArray(allUsers), + // andArray(allGroups)) + ConjunctiveCriterionArray conjunctiveCriteria = new ConjunctiveCriterionArray(); + + final CriterionArray allUsersAndArray = new CriterionArray(); + allUsersAndArray.add( + new Criterion().setField("allUsers").setValue("true").setCondition(Condition.EQUAL)); + conjunctiveCriteria.add(new ConjunctiveCriterion().setAnd(allUsersAndArray)); + + final CriterionArray allGroupsAndArray = new CriterionArray(); + allGroupsAndArray.add( + new Criterion().setField("allGroups").setValue("true").setCondition(Condition.EQUAL)); + conjunctiveCriteria.add(new ConjunctiveCriterion().setAnd(allGroupsAndArray)); + + if (user != null && !user.isEmpty()) { + final CriterionArray userAndArray = new CriterionArray(); + userAndArray.add( + new Criterion().setField("users").setValue(user).setCondition(Condition.EQUAL)); + conjunctiveCriteria.add(new ConjunctiveCriterion().setAnd(userAndArray)); + } + + if (groups != null && !groups.isEmpty()) { + final CriterionArray groupsAndArray = new CriterionArray(); + groupsAndArray.add( + new Criterion() + .setField("groups") + .setValue("") + .setValues(new StringArray(groups)) + .setCondition(Condition.EQUAL)); + conjunctiveCriteria.add(new ConjunctiveCriterion().setAnd(groupsAndArray)); + } + + if (roles != null && !roles.isEmpty()) { + final CriterionArray rolesAndArray = new CriterionArray(); + rolesAndArray.add( + new Criterion() + .setField("roles") + .setValue("") + .setValues(new StringArray(roles)) + 
.setCondition(Condition.EQUAL)); + conjunctiveCriteria.add(new ConjunctiveCriterion().setAnd(rolesAndArray)); + } + return new Filter().setOr(conjunctiveCriteria); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java new file mode 100644 index 00000000000000..186dfe658c2cfd --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenMetadataResolver.java @@ -0,0 +1,62 @@ +package com.linkedin.datahub.graphql.resolvers.auth; + +import com.datahub.authentication.token.StatefulTokenService; +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.types.auth.AccessTokenMetadataType; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class GetAccessTokenMetadataResolver + implements DataFetcher> { + + private final StatefulTokenService _tokenService; + private final EntityClient _entityClient; + + public GetAccessTokenMetadataResolver( + final StatefulTokenService tokenService, EntityClient entityClient) { + _tokenService = tokenService; + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return 
GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String token = environment.getArgument("token"); + log.info("User {} requesting access token metadata information.", context.getActorUrn()); + if (!AuthorizationUtils.canManageTokens(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + AccessTokenMetadataType metadataType = new AccessTokenMetadataType(_entityClient); + final String tokenHash = _tokenService.hash(token); + final String tokenUrn = _tokenService.tokenUrnFromKey(tokenHash).toString(); + try { + List> batchLoad = + metadataType.batchLoad(ImmutableList.of(tokenUrn), context); + if (batchLoad.isEmpty()) { + return null; + } + return batchLoad.get(0).getData(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java index 5ac4ec8ac3a6b5..4594fa5f891868 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatelessTokenService; @@ -7,6 +9,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import 
com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AccessToken; import com.linkedin.datahub.graphql.generated.AccessTokenType; @@ -18,12 +21,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for generating personal & service principal access tokens - */ +/** Resolver for generating personal & service principal access tokens */ @Slf4j public class GetAccessTokenResolver implements DataFetcher> { @@ -34,39 +32,51 @@ public GetAccessTokenResolver(final StatelessTokenService tokenService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final GetAccessTokenInput input = bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final GetAccessTokenInput input = + bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - final String accessToken = - _tokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); - AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - return result; - } - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. + final String actorUrn = input.getActorUrn(); + final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + final String accessToken = + _tokenService.generateAccessToken( + type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); + AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + return result; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final GetAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final GetAccessTokenInput input) { // Currently only an actor can generate a personal token for themselves. 
if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final GetAccessTokenInput input) { - return input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final GetAccessTokenInput input) { + return input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -74,14 +84,16 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, createUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } private Urn createUrn(final String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to validate provided urn %s", urnStr)); + throw new IllegalArgumentException( + String.format("Failed to validate provided urn %s", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index f9ba552d349e04..e0ecebbbc7bc2e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.EntityType; @@ -11,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import 
com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchResult; @@ -23,14 +25,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for listing personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for listing personal & service principal v2-type (stateful) access tokens. */ @Slf4j -public class ListAccessTokensResolver implements DataFetcher> { +public class ListAccessTokensResolver + implements DataFetcher> { private static final String EXPIRES_AT_FIELD_NAME = "expiresAt"; @@ -41,60 +39,94 @@ public ListAccessTokensResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final ListAccessTokenInput input = bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final ListAccessTokenInput input = + bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + final List filters = + input.getFilters() == null ? 
Collections.emptyList() : input.getFilters(); - log.info("User {} listing access tokens with filters {}", context.getActorUrn(), filters.toString()); + log.info( + "User {} listing access tokens with filters {}", + context.getActorUrn(), + filters.toString()); - if (AuthorizationUtils.canManageTokens(context) || isListingSelfTokens(filters, context)) { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(EXPIRES_AT_FIELD_NAME).setOrder(SortOrder.DESCENDING); - final SearchResult searchResult = _entityClient.search(Constants.ACCESS_TOKEN_ENTITY_NAME, "", - buildFilter(filters, Collections.emptyList()), sortCriterion, start, count, - getAuthentication(environment), new SearchFlags().setFulltext(true)); + if (AuthorizationUtils.canManageTokens(context) + || isListingSelfTokens(filters, context)) { + try { + final List sortCriteria = + Collections.singletonList( + new SortCriterion() + .setField(EXPIRES_AT_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)); + final SearchResult searchResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), + Constants.ACCESS_TOKEN_ENTITY_NAME, + "", + buildFilter( + filters, + Collections.emptyList(), + context.getOperationContext().getAspectRetriever()), + sortCriteria, + start, + count); - final List tokens = searchResult.getEntities().stream().map(entity -> { - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(entity.getEntity().toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - return metadata; - }).collect(Collectors.toList()); + final List tokens = + searchResult.getEntities().stream() + .map( + entity -> { + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn(entity.getEntity().toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + return metadata; + }) + .collect(Collectors.toList()); - final ListAccessTokenResult result = new ListAccessTokenResult(); - 
result.setTokens(tokens); - result.setStart(searchResult.getFrom()); - result.setCount(searchResult.getPageSize()); - result.setTotal(searchResult.getNumEntities()); + final ListAccessTokenResult result = new ListAccessTokenResult(); + result.setTokens(tokens); + result.setStart(searchResult.getFrom()); + result.setCount(searchResult.getPageSize()); + result.setTotal(searchResult.getNumEntities()); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list access tokens", e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list access tokens", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } /** - * Utility method to answer: Does the existing security context have permissions to generate their personal tokens - * AND is the request coming in requesting those personal tokens? - *

- * Note: We look for the actorUrn field because a token generated by someone else means that the generator actor has - * manage all access token privileges which means that he/she will be bound to just listing their own tokens. + * Utility method to answer: Does the existing security context have permissions to generate their + * personal tokens AND is the request coming in requesting those personal tokens? + * + *

Note: We look for the actorUrn field because a token generated by someone else means that + * the generator actor has manage all access token privileges which means that he/she will be + * bound to just listing their own tokens. * * @param filters The filters being used in the request. * @param context Current security context. * @return A boolean stating if the current user can list its personal tokens. */ - private boolean isListingSelfTokens(final List filters, final QueryContext context) { - return AuthorizationUtils.canGeneratePersonalAccessToken(context) && filters.stream() - .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); + private boolean isListingSelfTokens( + final List filters, final QueryContext context) { + return AuthorizationUtils.canGeneratePersonalAccessToken(context) + && filters.stream() + .anyMatch( + filter -> + filter.getField().equals("ownerUrn") + && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java index 252c0eaba6e854..eb152087699024 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.token.StatefulTokenService; import com.google.common.collect.ImmutableSet; import com.linkedin.access.token.DataHubAccessTokenInfo; @@ -7,6 +9,7 @@ import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -18,42 +21,41 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for revoking personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for revoking personal & service principal v2-type (stateful) access tokens. */ @Slf4j public class RevokeAccessTokenResolver implements DataFetcher> { private final EntityClient _entityClient; private final StatefulTokenService _statefulTokenService; - public RevokeAccessTokenResolver(final EntityClient entityClient, final StatefulTokenService statefulTokenService) { + public RevokeAccessTokenResolver( + final EntityClient entityClient, final StatefulTokenService statefulTokenService) { _entityClient = entityClient; _statefulTokenService = statefulTokenService; } @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); - log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); + log.info("User {} revoking access token", context.getActorUrn()); - if (isAuthorizedToRevokeToken(context, tokenId)) { - try { - _statefulTokenService.revokeAccessToken(tokenId); - } catch (Exception e) { - throw new RuntimeException("Failed to revoke access token", e); 
- } - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToRevokeToken(context, tokenId)) { + try { + _statefulTokenService.revokeAccessToken(tokenId); + } catch (Exception e) { + throw new RuntimeException("Failed to revoke access token", e); + } + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private boolean isAuthorizedToRevokeToken(final QueryContext context, final String tokenId) { @@ -62,12 +64,17 @@ private boolean isAuthorizedToRevokeToken(final QueryContext context, final Stri private boolean isOwnerOfAccessToken(final QueryContext context, final String tokenId) { try { - final EntityResponse entityResponse = _entityClient.getV2(Constants.ACCESS_TOKEN_ENTITY_NAME, - Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), - ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), context.getAuthentication()); + final EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + Constants.ACCESS_TOKEN_ENTITY_NAME, + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), + ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { + final DataMap data = + entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(data); return tokenInfo.getOwnerUrn().toString().equals(context.getActorUrn()); } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java index 4a1964b36032ca..a8636dc2880829 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java @@ -1,61 +1,68 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.BrowsePathsInput; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.Collections; -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowsePathsResolver implements DataFetcher>> { - private static final Logger _logger = LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); - - private final Map> _typeToEntity; - - public BrowsePathsResolver(@Nonnull final List> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } - - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final BrowsePathsInput input = 
bindArgument(environment.getArgument("input"), BrowsePathsInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Fetch browse paths. entity type: %s, urn: %s", - input.getType(), - input.getUrn())); - if (_typeToEntity.containsKey(input.getType())) { - return _typeToEntity.get(input.getType()).browsePaths(input.getUrn(), environment.getContext()); - } - // Browse path is impl detail. - return Collections.emptyList(); - } catch (Exception e) { - _logger.error("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()) + " " + e.getMessage()); - throw new RuntimeException("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()), e); + private static final Logger _logger = + LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); + + private final Map> _typeToEntity; + + public BrowsePathsResolver(@Nonnull final List> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } + + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final BrowsePathsInput input = + bindArgument(environment.getArgument("input"), BrowsePathsInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Fetch browse paths. entity type: %s, urn: %s", + input.getType(), input.getUrn())); + if (_typeToEntity.containsKey(input.getType())) { + return _typeToEntity + .get(input.getType()) + .browsePaths(input.getUrn(), environment.getContext()); } - }); - } + // Browse path is impl detail. 
+ return Collections.emptyList(); + } catch (Exception e) { + _logger.error( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java index 9c95eceb1e78fd..619e950bd106ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java @@ -1,77 +1,72 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BrowseInput; import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowseResolver implements DataFetcher> { - private static final int DEFAULT_START = 0; - private static final int 
DEFAULT_COUNT = 10; + private static final int DEFAULT_START = 0; + private static final int DEFAULT_COUNT = 10; - private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); + private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); - private final Map> _typeToEntity; + private final Map> _typeToEntity; - public BrowseResolver(@Nonnull final List> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } + public BrowseResolver(@Nonnull final List> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); - final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; - final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; + final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; + final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Executing browse. 
entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count)); - return _typeToEntity.get(input.getType()).browse( - input.getPath(), - input.getFilters(), - start, - count, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count) + " " + e.getMessage()); - throw new RuntimeException("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count), e); - } - }); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Executing browse. entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count)); + return _typeToEntity + .get(input.getType()) + .browse( + input.getPath(), input.getFilters(), start, count, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java index 81f82c93f1fa74..54faa567723366 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.browse; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.types.BrowsableEntityType; -import com.linkedin.datahub.graphql.generated.BrowsePath; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; public class EntityBrowsePathsResolver implements DataFetcher>> { @@ -24,12 +25,16 @@ public CompletableFuture> get(DataFetchingEnvironment environme final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - return CompletableFuture.supplyAsync(() -> { - try { - return _browsableType.browsePaths(urn, context); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return _browsableType.browsePaths(urn, context); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/AddBusinessAttributeResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/AddBusinessAttributeResolver.java new file mode 100644 index 00000000000000..54812d3442c9c1 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/AddBusinessAttributeResolver.java @@ -0,0 +1,119 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithUrn; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_ASPECT; + +import com.linkedin.businessattribute.BusinessAttributeAssociation; +import com.linkedin.businessattribute.BusinessAttributes; +import com.linkedin.common.urn.BusinessAttributeUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.AddBusinessAttributeInput; +import com.linkedin.datahub.graphql.generated.ResourceRefInput; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class AddBusinessAttributeResolver implements DataFetcher> { + private final EntityService entityService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws 
Exception { + final QueryContext context = environment.getContext(); + final AddBusinessAttributeInput input = + bindArgument(environment.getArgument("input"), AddBusinessAttributeInput.class); + final Urn businessAttributeUrn = UrnUtils.getUrn(input.getBusinessAttributeUrn()); + final List resourceRefInputs = input.getResourceUrn(); + validateBusinessAttribute(context.getOperationContext(), businessAttributeUrn); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + addBusinessAttributeToResource( + context.getOperationContext(), + businessAttributeUrn, + resourceRefInputs, + UrnUtils.getUrn(context.getActorUrn()), + entityService); + return true; + } catch (Exception e) { + log.error( + String.format( + "Failed to add Business Attribute %s to resources %s", + businessAttributeUrn, resourceRefInputs)); + throw new RuntimeException( + String.format( + "Failed to add Business Attribute %s to resources %s", + businessAttributeUrn, resourceRefInputs), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private void validateBusinessAttribute( + @Nonnull OperationContext opContext, Urn businessAttributeUrn) { + if (!entityService.exists(opContext, businessAttributeUrn, true)) { + throw new IllegalArgumentException( + String.format("This urn does not exist: %s", businessAttributeUrn)); + } + } + + private void addBusinessAttributeToResource( + @Nonnull OperationContext opContext, + Urn businessAttributeUrn, + List resourceRefInputs, + Urn actorUrn, + EntityService entityService) + throws URISyntaxException { + List proposals = new ArrayList<>(); + for (ResourceRefInput resourceRefInput : resourceRefInputs) { + proposals.add( + buildAddBusinessAttributeToEntityProposal( + opContext, businessAttributeUrn, resourceRefInput, entityService, actorUrn)); + } + EntityUtils.ingestChangeProposals(opContext, proposals, entityService, actorUrn, false); + } + + private MetadataChangeProposal buildAddBusinessAttributeToEntityProposal( + @Nonnull 
OperationContext opContext, + Urn businessAttributeUrn, + ResourceRefInput resource, + EntityService entityService, + Urn actorUrn) + throws URISyntaxException { + BusinessAttributes businessAttributes = + (BusinessAttributes) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + BUSINESS_ATTRIBUTE_ASPECT, + entityService, + new BusinessAttributes()); + if (!businessAttributes.hasBusinessAttribute()) { + businessAttributes.setBusinessAttribute(new BusinessAttributeAssociation()); + } + BusinessAttributeAssociation businessAttributeAssociation = + businessAttributes.getBusinessAttribute(); + businessAttributeAssociation.setBusinessAttributeUrn( + BusinessAttributeUrn.createFromUrn(businessAttributeUrn)); + businessAttributes.setBusinessAttribute(businessAttributeAssociation); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), BUSINESS_ATTRIBUTE_ASPECT, businessAttributes); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/BusinessAttributeAuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/BusinessAttributeAuthorizationUtils.java new file mode 100644 index 00000000000000..041f5e9ade77f0 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/BusinessAttributeAuthorizationUtils.java @@ -0,0 +1,37 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.metadata.authorization.PoliciesConfig; +import javax.annotation.Nonnull; + +public class BusinessAttributeAuthorizationUtils { + private BusinessAttributeAuthorizationUtils() {} 
+ + public static boolean canCreateBusinessAttribute(@Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_BUSINESS_ATTRIBUTE_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_BUSINESS_ATTRIBUTE_PRIVILEGE.getType())))); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); + } + + public static boolean canManageBusinessAttribute(@Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_BUSINESS_ATTRIBUTE_PRIVILEGE.getType())))); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeResolver.java new file mode 100644 index 00000000000000..d9cb668cc051d8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeResolver.java @@ -0,0 +1,133 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithKey; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME; + +import com.linkedin.businessattribute.BusinessAttributeInfo; +import 
com.linkedin.businessattribute.BusinessAttributeKey; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.CreateBusinessAttributeInput; +import com.linkedin.datahub.graphql.generated.OwnerEntityType; +import com.linkedin.datahub.graphql.resolvers.mutate.util.BusinessAttributeUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; +import com.linkedin.datahub.graphql.types.businessattribute.mappers.BusinessAttributeMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.service.BusinessAttributeService; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class CreateBusinessAttributeResolver + implements DataFetcher> { + private final EntityClient _entityClient; + private final EntityService _entityService; + private final BusinessAttributeService businessAttributeService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + CreateBusinessAttributeInput input = + 
bindArgument(environment.getArgument("input"), CreateBusinessAttributeInput.class); + if (!BusinessAttributeAuthorizationUtils.canCreateBusinessAttribute(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final BusinessAttributeKey businessAttributeKey = new BusinessAttributeKey(); + String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + businessAttributeKey.setId(id); + + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn( + businessAttributeKey, BUSINESS_ATTRIBUTE_ENTITY_NAME))) { + throw new IllegalArgumentException("This Business Attribute already exists!"); + } + + if (BusinessAttributeUtils.hasNameConflict(input.getName(), context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists as Business Attribute. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } + + // Create the MCP + final MetadataChangeProposal changeProposal = + buildMetadataChangeProposalWithKey( + businessAttributeKey, + BUSINESS_ATTRIBUTE_ENTITY_NAME, + BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + mapBusinessAttributeInfo(input, context)); + + // Ingest the MCP + Urn businessAttributeUrn = + UrnUtils.getUrn( + _entityClient.ingestProposal(context.getOperationContext(), changeProposal)); + OwnerUtils.addCreatorAsOwner( + context, + businessAttributeUrn.toString(), + OwnerEntityType.CORP_USER, + _entityService); + return BusinessAttributeMapper.map( + context, + businessAttributeService.getBusinessAttributeEntityResponse( + context.getOperationContext(), businessAttributeUrn)); + + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to create Business Attribute with name: {}: {}", + input.getName(), + e.getMessage()); + throw new RuntimeException( 
+ String.format("Failed to create Business Attribute with name: %s", input.getName()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private BusinessAttributeInfo mapBusinessAttributeInfo( + CreateBusinessAttributeInput input, QueryContext context) { + final BusinessAttributeInfo info = new BusinessAttributeInfo(); + info.setFieldPath(input.getName(), SetMode.DISALLOW_NULL); + info.setName(input.getName(), SetMode.DISALLOW_NULL); + info.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + info.setType( + BusinessAttributeUtils.mapSchemaFieldDataType(input.getType()), SetMode.IGNORE_NULL); + info.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); + info.setLastModified( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); + return info; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/DeleteBusinessAttributeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/DeleteBusinessAttributeResolver.java new file mode 100644 index 00000000000000..bec37731a4ca03 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/DeleteBusinessAttributeResolver.java @@ -0,0 +1,61 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.entity.client.EntityClient; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import 
lombok.extern.slf4j.Slf4j; + +/** Resolver responsible for hard deleting a particular Business Attribute */ +@Slf4j +@RequiredArgsConstructor +public class DeleteBusinessAttributeResolver implements DataFetcher> { + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final Urn businessAttributeUrn = UrnUtils.getUrn(environment.getArgument("urn")); + if (!BusinessAttributeAuthorizationUtils.canManageBusinessAttribute(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + if (!_entityClient.exists(context.getOperationContext(), businessAttributeUrn)) { + throw new RuntimeException( + String.format("This urn does not exist: %s", businessAttributeUrn)); + } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(context.getOperationContext(), businessAttributeUrn); + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + context.getOperationContext(), businessAttributeUrn); + } catch (Exception e) { + log.error( + String.format( + "Exception while attempting to clear all entity references for Business Attribute with urn %s", + businessAttributeUrn), + e); + } + }); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to delete Business Attribute with urn %s", businessAttributeUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/ListBusinessAttributesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/ListBusinessAttributesResolver.java new file mode 100644 index 00000000000000..ebcdbe5e064942 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/ListBusinessAttributesResolver.java @@ -0,0 +1,95 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.ListBusinessAttributesInput; +import com.linkedin.datahub.graphql.generated.ListBusinessAttributesResult; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +/** Resolver used for listing Business Attributes. 
*/ +@Slf4j +public class ListBusinessAttributesResolver + implements DataFetcher> { + + private static final Integer DEFAULT_START = 0; + private static final Integer DEFAULT_COUNT = 20; + private static final String DEFAULT_QUERY = ""; + + private final EntityClient _entityClient; + + public ListBusinessAttributesResolver(@Nonnull final EntityClient entityClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + } + + @Override + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { + + final QueryContext context = environment.getContext(); + final ListBusinessAttributesInput input = + bindArgument(environment.getArgument("input"), ListBusinessAttributesInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery(); + + try { + + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count); + + final ListBusinessAttributesResult result = new ListBusinessAttributesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setBusinessAttributes( + mapUnresolvedBusinessAttributes( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Business Attributes", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private List mapUnresolvedBusinessAttributes(final List entityUrns) { + final List results = new ArrayList<>(); + for (final Urn urn : entityUrns) { + final BusinessAttribute unresolvedBusinessAttribute = new BusinessAttribute(); + unresolvedBusinessAttribute.setUrn(urn.toString()); + unresolvedBusinessAttribute.setType(EntityType.BUSINESS_ATTRIBUTE); + results.add(unresolvedBusinessAttribute); + } + return results; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/RemoveBusinessAttributeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/RemoveBusinessAttributeResolver.java new file mode 100644 index 00000000000000..d85282c921dffc --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/RemoveBusinessAttributeResolver.java @@ -0,0 +1,95 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithUrn; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_ASPECT; + +import com.linkedin.businessattribute.BusinessAttributes; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.AddBusinessAttributeInput; +import com.linkedin.datahub.graphql.generated.ResourceRefInput; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class RemoveBusinessAttributeResolver implements DataFetcher> { + private final EntityService entityService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final AddBusinessAttributeInput input = + bindArgument(environment.getArgument("input"), AddBusinessAttributeInput.class); + final Urn businessAttributeUrn = UrnUtils.getUrn(input.getBusinessAttributeUrn()); + final List resourceRefInputs = input.getResourceUrn(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + removeBusinessAttribute( + context.getOperationContext(), + resourceRefInputs, + UrnUtils.getUrn(context.getActorUrn())); + return true; + } catch (Exception e) { + log.error( + String.format( + "Failed to remove Business Attribute with urn %s from resources %s", + 
businessAttributeUrn, resourceRefInputs)); + throw new RuntimeException( + String.format( + "Failed to remove Business Attribute with urn %s from resources %s", + businessAttributeUrn, resourceRefInputs), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private void removeBusinessAttribute( + @Nonnull OperationContext opContext, List resourceRefInputs, Urn actorUrn) { + List proposals = new ArrayList<>(); + for (ResourceRefInput resourceRefInput : resourceRefInputs) { + proposals.add( + buildRemoveBusinessAttributeFromResourceProposal( + opContext, resourceRefInput, entityService)); + } + EntityUtils.ingestChangeProposals(opContext, proposals, entityService, actorUrn, false); + } + + private MetadataChangeProposal buildRemoveBusinessAttributeFromResourceProposal( + @Nonnull OperationContext opContext, + ResourceRefInput resource, + EntityService entityService) { + BusinessAttributes businessAttributes = + (BusinessAttributes) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + BUSINESS_ATTRIBUTE_ASPECT, + entityService, + new BusinessAttributes()); + if (!businessAttributes.hasBusinessAttribute()) { + throw new RuntimeException( + String.format("Schema field has not attached with business attribute")); + } + businessAttributes.removeBusinessAttribute(); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), BUSINESS_ATTRIBUTE_ASPECT, businessAttributes); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateBusinessAttributeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateBusinessAttributeResolver.java new file mode 100644 index 00000000000000..16120a27261a48 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateBusinessAttributeResolver.java @@ -0,0 +1,150 @@ +package 
com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.UpdateBusinessAttributeInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.BusinessAttributeUtils; +import com.linkedin.datahub.graphql.types.businessattribute.mappers.BusinessAttributeMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.service.BusinessAttributeService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class UpdateBusinessAttributeResolver + implements DataFetcher> { + + private final EntityClient _entityClient; + private final BusinessAttributeService businessAttributeService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + QueryContext context = environment.getContext(); + 
UpdateBusinessAttributeInput input = + bindArgument(environment.getArgument("input"), UpdateBusinessAttributeInput.class); + final Urn businessAttributeUrn = UrnUtils.getUrn(environment.getArgument("urn")); + if (!BusinessAttributeAuthorizationUtils.canCreateBusinessAttribute(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + if (!_entityClient.exists(context.getOperationContext(), businessAttributeUrn)) { + throw new RuntimeException( + String.format("This urn does not exist: %s", businessAttributeUrn)); + } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + Urn updatedBusinessAttributeUrn = + updateBusinessAttribute(input, businessAttributeUrn, context); + return BusinessAttributeMapper.map( + context, + businessAttributeService.getBusinessAttributeEntityResponse( + context.getOperationContext(), updatedBusinessAttributeUrn)); + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update Business Attribute with urn %s", businessAttributeUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private Urn updateBusinessAttribute( + UpdateBusinessAttributeInput input, Urn businessAttributeUrn, QueryContext context) { + try { + BusinessAttributeInfo businessAttributeInfo = + getBusinessAttributeInfo(context.getOperationContext(), businessAttributeUrn); + // 1. Check whether the Business Attribute exists + if (businessAttributeInfo == null) { + throw new IllegalArgumentException( + String.format( + "Failed to update Business Attribute. Business Attribute with urn %s does not exist.", + businessAttributeUrn)); + } + + // 2. 
Apply changes to existing Business Attribute + if (Objects.nonNull(input.getName())) { + if (BusinessAttributeUtils.hasNameConflict(input.getName(), context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists as Business Attribute. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } + businessAttributeInfo.setName(input.getName()); + businessAttributeInfo.setFieldPath(input.getName()); + } + if (Objects.nonNull(input.getDescription())) { + businessAttributeInfo.setDescription(input.getDescription()); + } + if (Objects.nonNull(input.getType())) { + businessAttributeInfo.setType( + BusinessAttributeUtils.mapSchemaFieldDataType(input.getType())); + } + businessAttributeInfo.setLastModified( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); + // 3. Write changes to GMS + return UrnUtils.getUrn( + _entityClient.ingestProposal( + context.getOperationContext(), + AspectUtils.buildMetadataChangeProposal( + businessAttributeUrn, + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo))); + + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + @Nullable + private BusinessAttributeInfo getBusinessAttributeInfo( + @Nonnull OperationContext opContext, @Nonnull final Urn businessAttributeUrn) { + Objects.requireNonNull(businessAttributeUrn, "businessAttributeUrn must not be null"); + Objects.requireNonNull(opContext, "opContext must not be null"); + final EntityResponse response = + businessAttributeService.getBusinessAttributeEntityResponse( + opContext, businessAttributeUrn); + if (response != null + && response.getAspects().containsKey(Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME)) { + return new BusinessAttributeInfo( + response + .getAspects() + .get(Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME) + .getValue() + .data()); + } + // No aspect found 
+ return null; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 76abddc9a99a96..b54ca398aef980 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -1,34 +1,39 @@ package com.linkedin.datahub.graphql.resolvers.chart; +import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; import com.linkedin.datahub.graphql.generated.BrowseResultMetadata; import com.linkedin.datahub.graphql.generated.BrowseResultsV2; import com.linkedin.datahub.graphql.generated.BrowseV2Input; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.search.SearchUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResultV2; +import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import 
com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -36,6 +41,7 @@ public class BrowseV2Resolver implements DataFetcher get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final BrowseV2Input input = bindArgument(environment.getArgument("input"), BrowseV2Input.class); - final String entityName = EntityTypeMapper.getName(input.getType()); + final List entityNames = getEntityNames(input); final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; final String query = input.getQuery() != null ? input.getQuery() : "*"; + final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); - return CompletableFuture.supplyAsync(() -> { - try { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - final String pathStr = input.getPath().size() > 0 ? 
BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + context.getOperationContext(), + _viewService, + UrnUtils.getUrn(input.getViewUrn())) + : null; + final String pathStr = + input.getPath().size() > 0 + ? BROWSE_PATH_V2_DELIMITER + + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) + : ""; + final Filter inputFilter = + ResolverUtils.buildFilter( + null, input.getOrFilters(), context.getOperationContext().getAspectRetriever()); - BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, - pathStr, - maybeResolvedView != null - ? SearchUtils.combineFilters(filter, maybeResolvedView.getDefinition().getFilter()) - : filter, - sanitizedQuery, - start, - count, - context.getAuthentication() - ); - return mapBrowseResults(browseResults); - } catch (Exception e) { - throw new RuntimeException("Failed to execute browse V2", e); - } - }); + BrowseResultV2 browseResults = + _entityClient.browseV2( + context.getOperationContext().withSearchFlags(flags -> searchFlags), + entityNames, + pathStr, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, + sanitizedQuery, + start, + count); + return mapBrowseResults(context, browseResults); + } catch (Exception e) { + throw new RuntimeException("Failed to execute browse V2", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { + public static List getEntityNames(BrowseV2Input input) { + List entityTypes; + if (input.getTypes() != null && input.getTypes().size() > 0) { + entityTypes = input.getTypes(); + } else if (input.getType() != null) { + entityTypes = ImmutableList.of(input.getType()); + } else { + entityTypes = BROWSE_ENTITY_TYPES; + } + return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + } + + private BrowseResultsV2 mapBrowseResults( + @Nullable QueryContext context, BrowseResultV2 browseResults) { BrowseResultsV2 results = new BrowseResultsV2(); results.setTotal(browseResults.getNumGroups()); results.setStart(browseResults.getFrom()); results.setCount(browseResults.getPageSize()); List groups = new ArrayList<>(); - browseResults.getGroups().forEach(group -> { - BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); - browseGroup.setName(group.getName()); - browseGroup.setCount(group.getCount()); - browseGroup.setHasSubGroups(group.isHasSubGroups()); - if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); - } - groups.add(browseGroup); - }); + browseResults + .getGroups() + .forEach( + group -> { + BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); + browseGroup.setName(group.getName()); + browseGroup.setCount(group.getCount()); + browseGroup.setHasSubGroups(group.isHasSubGroups()); + if (group.hasUrn() && group.getUrn() != null) { + browseGroup.setEntity(UrnToEntityMapper.map(context, group.getUrn())); + } + groups.add(browseGroup); + }); 
results.setGroups(groups); BrowseResultMetadata resultMetadata = new BrowseResultMetadata(); - resultMetadata.setPath(Arrays.stream(browseResults.getMetadata().getPath() - .split(BROWSE_PATH_V2_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()) - ); + resultMetadata.setPath( + Arrays.stream(browseResults.getMetadata().getPath().split(BROWSE_PATH_V2_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList())); resultMetadata.setTotalNumEntities(browseResults.getMetadata().getTotalNumEntities()); results.setMetadata(resultMetadata); return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java index 207da02de6ec2d..68468f195a5af0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java @@ -1,34 +1,26 @@ package com.linkedin.datahub.graphql.resolvers.chart; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ChartStatsSummary; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; - @Slf4j -public class ChartStatsSummaryResolver implements DataFetcher> { +public class ChartStatsSummaryResolver + implements DataFetcher> { private final TimeseriesAspectService timeseriesAspectService; - private final Cache summaryCache; public ChartStatsSummaryResolver(final TimeseriesAspectService 
timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) - .build(); } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { // Not yet implemented return CompletableFuture.completedFuture(null); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index f6bc68caa0821c..259d05c631557d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -35,10 +35,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * Resolver responsible for serving app configurations to the React UI. - */ +/** Resolver responsible for serving app configurations to the React UI. 
*/ public class AppConfigResolver implements DataFetcher> { private final GitVersion _gitVersion; @@ -82,7 +79,8 @@ public AppConfigResolver( } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); @@ -103,19 +101,20 @@ public CompletableFuture get(final DataFetchingEnvironment environmen final PoliciesConfig policiesConfig = new PoliciesConfig(); policiesConfig.setEnabled(_authorizationConfiguration.getDefaultAuthorizer().isEnabled()); - policiesConfig.setPlatformPrivileges(com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES - .stream() - .map(this::mapPrivilege) - .collect(Collectors.toList())); + policiesConfig.setPlatformPrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES.stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); - policiesConfig.setResourcePrivileges(com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES - .stream() - .map(this::mapResourcePrivileges) - .collect(Collectors.toList()) - ); + policiesConfig.setResourcePrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES.stream() + .map(this::mapResourcePrivileges) + .collect(Collectors.toList())); final IdentityManagementConfig identityManagementConfig = new IdentityManagementConfig(); - identityManagementConfig.setEnabled(true); // Identity Management always enabled. TODO: Understand if there's a case where this should change. + identityManagementConfig.setEnabled( + true); // Identity Management always enabled. TODO: Understand if there's a case where this + // should change. 
final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig(); ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled()); @@ -127,13 +126,20 @@ public CompletableFuture get(final DataFetchingEnvironment environmen appConfig.setAuthConfig(authConfig); final VisualConfig visualConfig = new VisualConfig(); - if (_visualConfiguration != null && _visualConfiguration.getAssets() != null) { - visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); - visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + if (_visualConfiguration != null) { + if (_visualConfiguration.getAssets() != null) { + visualConfig.setLogoUrl(_visualConfiguration.getAssets().getLogoUrl()); + visualConfig.setFaviconUrl(_visualConfiguration.getAssets().getFaviconUrl()); + } + if (_visualConfiguration.getAppTitle() != null) { + visualConfig.setAppTitle(_visualConfiguration.getAppTitle()); + } + visualConfig.setHideGlossary(_visualConfiguration.isHideGlossary()); } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); - queriesTabConfig.setQueriesTabResultSize(_visualConfiguration.getQueriesTab().getQueriesTabResultSize()); + queriesTabConfig.setQueriesTabResultSize( + _visualConfiguration.getQueriesTab().getQueriesTabResultSize()); visualConfig.setQueriesTab(queriesTabConfig); } if (_visualConfiguration != null && _visualConfiguration.getEntityProfile() != null) { @@ -148,7 +154,8 @@ public CompletableFuture get(final DataFetchingEnvironment environmen if (_visualConfiguration != null && _visualConfiguration.getSearchResult() != null) { SearchResultsVisualConfig searchResultsVisualConfig = new SearchResultsVisualConfig(); if (_visualConfiguration.getSearchResult().getEnableNameHighlight() != null) { - searchResultsVisualConfig.setEnableNameHighlight(_visualConfiguration.getSearchResult().getEnableNameHighlight()); + 
searchResultsVisualConfig.setEnableNameHighlight( + _visualConfiguration.getSearchResult().getEnableNameHighlight()); } visualConfig.setSearchResult(searchResultsVisualConfig); } @@ -166,14 +173,22 @@ public CompletableFuture get(final DataFetchingEnvironment environmen viewsConfig.setEnabled(_viewsConfiguration.isEnabled()); appConfig.setViewsConfig(viewsConfig); - final FeatureFlagsConfig featureFlagsConfig = FeatureFlagsConfig.builder() - .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) - .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) - .setShowBrowseV2(_featureFlags.isShowBrowseV2()) - .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) - .setShowAccessManagement(_featureFlags.isShowAccessManagement()) - .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) - .build(); + final FeatureFlagsConfig featureFlagsConfig = + FeatureFlagsConfig.builder() + .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) + .setBusinessAttributeEntityEnabled(_featureFlags.isBusinessAttributeEntityEnabled()) + .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) + .setShowBrowseV2(_featureFlags.isShowBrowseV2()) + .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) + .setErModelRelationshipFeatureEnabled( + _featureFlags.isErModelRelationshipFeatureEnabled()) + .setShowAccessManagement(_featureFlags.isShowAccessManagement()) + .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2()) + .setDataContractsEnabled(_featureFlags.isDataContractsEnabled()) + .setEditableDatasetNameEnabled(_featureFlags.isEditableDatasetNameEnabled()) + .setShowSeparateSiblings(_featureFlags.isShowSeparateSiblings()) + .build(); appConfig.setFeatureFlags(featureFlagsConfig); @@ -185,14 +200,17 @@ private ResourcePrivileges mapResourcePrivileges( final ResourcePrivileges graphQLPrivileges = new ResourcePrivileges(); 
graphQLPrivileges.setResourceType(resourcePrivileges.getResourceType()); graphQLPrivileges.setResourceTypeDisplayName(resourcePrivileges.getResourceTypeDisplayName()); - graphQLPrivileges.setEntityType(mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); + graphQLPrivileges.setEntityType( + mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); graphQLPrivileges.setPrivileges( - resourcePrivileges.getPrivileges().stream().map(this::mapPrivilege).collect(Collectors.toList()) - ); + resourcePrivileges.getPrivileges().stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); return graphQLPrivileges; } - private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { + private Privilege mapPrivilege( + com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { final Privilege graphQLPrivilege = new Privilege(); graphQLPrivilege.setType(privilege.getType()); graphQLPrivilege.setDisplayName(privilege.getDisplayName()); @@ -202,30 +220,62 @@ private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfi private EntityType mapResourceTypeToEntityType(final String resourceType) { // TODO: Is there a better way to instruct the UI to present a searchable resource? 
- if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES.getResourceType().equals(resourceType)) { + if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATASET; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DASHBOARD; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CHART; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_FLOW; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_JOB; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.TAG; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_TERM; - } else if 
(com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_NODE; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DOMAIN; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CONTAINER; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_GROUP; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_USER; + } else if (com.linkedin.metadata.authorization.PoliciesConfig.ER_MODEL_RELATIONSHIP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { + return EntityType.ER_MODEL_RELATIONSHIP; + } else if (com.linkedin.metadata.authorization.PoliciesConfig.BUSINESS_ATTRIBUTE_PRIVILEGES + .getResourceType() + .equals(resourceType)) { + return EntityType.BUSINESS_ATTRIBUTE; } else { return null; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/ConnectionMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/ConnectionMapper.java new file mode 100644 index 00000000000000..a4ad332d5946d2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/ConnectionMapper.java @@ -0,0 +1,104 @@ +package com.linkedin.datahub.graphql.resolvers.connection; + +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubConnection; +import com.linkedin.datahub.graphql.generated.DataHubConnectionDetails; +import com.linkedin.datahub.graphql.generated.DataHubJsonConnection; +import com.linkedin.datahub.graphql.generated.DataPlatform; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import io.datahubproject.metadata.services.SecretService; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ConnectionMapper { + /** + * Maps a GMS encrypted connection details object into the decrypted form returned by the GraphQL + * API. + * + *

Returns null if the Entity does not have the required aspects: dataHubConnectionDetails or + * dataPlatformInstance. + */ + @Nullable + public static DataHubConnection map( + @Nonnull final QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nonnull final SecretService secretService) { + // If the connection does not exist, simply return null + if (!hasAspects(entityResponse)) { + return null; + } + + final DataHubConnection result = new DataHubConnection(); + final Urn entityUrn = entityResponse.getUrn(); + final EnvelopedAspectMap aspects = entityResponse.getAspects(); + + result.setUrn(entityUrn.toString()); + result.setType(EntityType.DATAHUB_CONNECTION); + + final EnvelopedAspect envelopedAssertionInfo = + aspects.get(Constants.DATAHUB_CONNECTION_DETAILS_ASPECT_NAME); + if (envelopedAssertionInfo != null) { + result.setDetails( + mapConnectionDetails( + context, + new com.linkedin.connection.DataHubConnectionDetails( + envelopedAssertionInfo.getValue().data()), + secretService)); + } + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + if (envelopedPlatformInstance != null) { + final DataMap data = envelopedPlatformInstance.getValue().data(); + result.setPlatform(mapPlatform(new DataPlatformInstance(data))); + } + return result; + } + + private static DataHubConnectionDetails mapConnectionDetails( + @Nonnull final QueryContext context, + @Nonnull final com.linkedin.connection.DataHubConnectionDetails gmsDetails, + @Nonnull final SecretService secretService) { + final DataHubConnectionDetails result = new DataHubConnectionDetails(); + result.setType( + com.linkedin.datahub.graphql.generated.DataHubConnectionDetailsType.valueOf( + gmsDetails.getType().toString())); + if (gmsDetails.hasJson() && ConnectionUtils.canManageConnections(context)) { + result.setJson(mapJsonConnectionDetails(gmsDetails.getJson(), secretService)); + } + if (gmsDetails.hasName()) { + 
result.setName(gmsDetails.getName()); + } + return result; + } + + private static DataHubJsonConnection mapJsonConnectionDetails( + @Nonnull final com.linkedin.connection.DataHubJsonConnection gmsJsonConnection, + @Nonnull final SecretService secretService) { + final DataHubJsonConnection result = new DataHubJsonConnection(); + // Decrypt the BLOB! + result.setBlob(secretService.decrypt(gmsJsonConnection.getEncryptedBlob())); + return result; + } + + private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) { + // Set dummy platform to be resolved. + final DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(platformInstance.getPlatform().toString()); + return partialPlatform; + } + + private static boolean hasAspects(@Nonnull final EntityResponse response) { + return response.hasAspects() + && response.getAspects().containsKey(Constants.DATAHUB_CONNECTION_DETAILS_ASPECT_NAME) + && response.getAspects().containsKey(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + } + + private ConnectionMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/ConnectionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/ConnectionUtils.java new file mode 100644 index 00000000000000..bcdd6460ae75ed --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/ConnectionUtils.java @@ -0,0 +1,23 @@ +package com.linkedin.datahub.graphql.resolvers.connection; + +import com.datahub.authorization.AuthUtil; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.metadata.authorization.PoliciesConfig; +import javax.annotation.Nonnull; + +/** Utilities for working with DataHub Connections. */ +public class ConnectionUtils { + + /** + * Returns true if the user is able to read and/or write connections between DataHub and external + * platforms.
+ */ + public static boolean canManageConnections(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_CONNECTIONS_PRIVILEGE); + } + + private ConnectionUtils() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/UpsertConnectionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/UpsertConnectionResolver.java new file mode 100644 index 00000000000000..3aae612b8cb784 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/connection/UpsertConnectionResolver.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.resolvers.connection; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.connection.DataHubConnectionDetailsType; +import com.linkedin.connection.DataHubJsonConnection; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.DataHubConnection; +import com.linkedin.datahub.graphql.generated.UpsertDataHubConnectionInput; +import com.linkedin.entity.EntityResponse; +import com.linkedin.metadata.connection.ConnectionService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class UpsertConnectionResolver implements DataFetcher> { + + private final ConnectionService _connectionService; + private final SecretService _secretService; + + public UpsertConnectionResolver( + @Nonnull final ConnectionService 
connectionService, + @Nonnull final SecretService secretService) { + _connectionService = + Objects.requireNonNull(connectionService, "connectionService cannot be null"); + _secretService = Objects.requireNonNull(secretService, "secretService cannot be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + + final QueryContext context = environment.getContext(); + final UpsertDataHubConnectionInput input = + bindArgument(environment.getArgument("input"), UpsertDataHubConnectionInput.class); + final Authentication authentication = context.getAuthentication(); + + return CompletableFuture.supplyAsync( + () -> { + if (!ConnectionUtils.canManageConnections(context)) { + throw new AuthorizationException( + "Unauthorized to upsert Connection. Please contact your DataHub administrator for more information."); + } + + try { + final Urn connectionUrn = + _connectionService.upsertConnection( + context.getOperationContext(), + input.getId(), + UrnUtils.getUrn(input.getPlatformUrn()), + DataHubConnectionDetailsType.valueOf(input.getType().toString()), + input.getJson() != null + // Encrypt payload + ? 
new DataHubJsonConnection() + .setEncryptedBlob(_secretService.encrypt(input.getJson().getBlob())) + : null, + input.getName()); + + final EntityResponse connectionResponse = + _connectionService.getConnectionEntityResponse( + context.getOperationContext(), connectionUrn); + return ConnectionMapper.map(context, connectionResponse, _secretService); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to upsert a Connection from input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 4b8bd37a4fabef..5a3207633c07c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.ContainerEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; @@ -16,25 +19,21 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Retrieves a list of historical executions for a particular source. 
- */ +/** Retrieves the entities contained within a particular Container. */ @Slf4j public class ContainerEntitiesResolver implements DataFetcher> { - static final List CONTAINABLE_ENTITY_NAMES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME - ); + static final List CONTAINABLE_ENTITY_NAMES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME); private static final String CONTAINER_FIELD_NAME = "container"; private static final String INPUT_ARG_NAME = "input"; private static final String DEFAULT_QUERY = "*"; @@ -55,45 +54,56 @@ public ContainerEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Container) environment.getSource()).getUrn(); - final ContainerEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final ContainerEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : "*"; final int start = input.getStart() != null ? input.getStart() : 0; final int count = input.getCount() != null ?
input.getCount() : 20; - return CompletableFuture.supplyAsync(() -> { - - try { - - final Criterion filterCriterion = new Criterion() - .setField(CONTAINER_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - CONTAINABLE_ENTITY_NAMES, - query, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with container with urn %s", urn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + + final Criterion filterCriterion = + new Criterion() + .setField(CONTAINER_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + + return UrnSearchResultsMapper.map( + context, + _entityClient.searchAcrossEntities( + context.getOperationContext(), + CONTAINABLE_ENTITY_NAMES, + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + start, + count, + Collections.emptyList(), + null)); + + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve entities associated with container with urn %s", urn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 90fad4ca4578a1..e7c0f6bb0729a8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.Entity; @@ -12,15 +15,13 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; - -public class ParentContainersResolver implements DataFetcher> { +public class ParentContainersResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -28,23 +29,27 @@ public ParentContainersResolver(final EntityClient entityClient) { _entityClient = entityClient; } - private void aggregateParentContainers(List containers, String urn, QueryContext context) { + private void aggregateParentContainers( + List containers, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(CONTAINER_ASPECT_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null 
+ && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); - EntityResponse response = _entityClient.getV2(containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + context.getOperationContext(), containerUrn.getEntityType(), containerUrn, null); if (response != null) { - Container mappedContainer = ContainerMapper.map(response); + Container mappedContainer = ContainerMapper.map(context, response); containers.add(mappedContainer); aggregateParentContainers(containers, mappedContainer.getUrn(), context); } @@ -61,16 +66,22 @@ public CompletableFuture get(DataFetchingEnvironment env final String urn = ((Entity) environment.getSource()).getUrn(); final List containers = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - aggregateParentContainers(containers, urn, context); - final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); - return result; - } catch (DataHubGraphQLException e) { - throw new RuntimeException("Failed to load all containers", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + aggregateParentContainers(containers, urn, context); + final ParentContainersResult result = new ParentContainersResult(); + + List viewable = new ArrayList<>(containers); + + result.setCount(viewable.size()); + result.setContainers(viewable); + return result; + } catch (DataHubGraphQLException e) { + throw new RuntimeException("Failed to load all containers", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index db125384745a10..46d5add9d3f99e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -1,101 +1,116 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.isViewDatasetUsageAuthorized; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.CorpUser; -import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; +import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUserUsageCounts; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; 
-import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - @Slf4j -public class DashboardStatsSummaryResolver implements DataFetcher> { +public class DashboardStatsSummaryResolver + implements DataFetcher> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; private final TimeseriesAspectService timeseriesAspectService; - private final Cache summaryCache; public DashboardStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. - .build(); } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - - try { - - final DashboardStatsSummary result = new DashboardStatsSummary(); - - // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. - List dashboardUsageMetrics = - getDashboardUsageMetrics(resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); - if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); - } - - // Obtain unique user statistics, by rolling up unique users over the past month. 
- List userUsageCounts = getDashboardUsagePerUser(resourceUrn); - result.setUniqueUserCountLast30Days(userUsageCounts.size()); - result.setTopUsersLast30Days( - trimUsers(userUsageCounts.stream().map(DashboardUserUsageCounts::getUser).collect(Collectors.toList()))); - - this.summaryCache.put(resourceUrn, result); - return result; - - } catch (Exception e) { - log.error(String.format("Failed to load dashboard usage summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + final QueryContext context = environment.getContext(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + + // TODO: We don't have a dashboard specific priv + if (!isViewDatasetUsageAuthorized(context, resourceUrn)) { + log.debug( + "User {} is not authorized to view usage information for {}", + context.getActorUrn(), + resourceUrn.toString()); + return null; + } + + final DashboardStatsSummary result = new DashboardStatsSummary(); + + // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. + List dashboardUsageMetrics = + getDashboardUsageMetrics( + context, resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + if (dashboardUsageMetrics.size() > 0) { + result.setViewCount(getDashboardViewCount(context, resourceUrn)); + } + + // Obtain unique user statistics, by rolling up unique users over the past month. + List userUsageCounts = + getDashboardUsagePerUser(context.getOperationContext(), resourceUrn); + result.setUniqueUserCountLast30Days(userUsageCounts.size()); + result.setTopUsersLast30Days( + trimUsers( + userUsageCounts.stream() + .map(DashboardUserUsageCounts::getUser) + .collect(Collectors.toList()))); + + return result; + + } catch (Exception e) { + log.error( + String.format( + "Failed to load dashboard usage summary for resource %s", + resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. 
+ } + }, + this.getClass().getSimpleName(), + "get"); } - private int getDashboardViewCount(final Urn resourceUrn) { - List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), - null, - null, - 1, - this.timeseriesAspectService); + private int getDashboardViewCount(@Nullable QueryContext context, final Urn resourceUrn) { + List dashboardUsageMetrics = + getDashboardUsageMetrics( + context, resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } - private List getDashboardUsagePerUser(final Urn resourceUrn) { + private List getDashboardUsagePerUser( + @Nonnull OperationContext opContext, final Urn resourceUrn) { long now = System.currentTimeMillis(); long nowMinusOneMonth = timeMinusOneMonth(now); - Filter bucketStatsFilter = createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); - return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); + Filter bucketStatsFilter = + createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); + return getUserUsageCounts(opContext, bucketStatsFilter, this.timeseriesAspectService); } - private List trimUsers(final List originalUsers) { + private static List trimUsers(final List originalUsers) { if (originalUsers.size() > MAX_TOP_USERS) { return originalUsers.subList(0, MAX_TOP_USERS); } return originalUsers; } - } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 24e1db33e9d404..7e4a9c8a803880 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -1,7 +1,11 @@ package 
com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUsageQueryResult; @@ -24,18 +28,17 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - /** * Resolver used for resolving the usage statistics of a Dashboard. - *

- * Returns daily as well as absolute usage metrics of Dashboard + * + *

Returns daily as well as absolute usage metrics of Dashboard */ @Slf4j -public class DashboardUsageStatsResolver implements DataFetcher> { +public class DashboardUsageStatsResolver + implements DataFetcher> { private static final String ES_FIELD_EVENT_GRANULARITY = "eventGranularity"; private final TimeseriesAspectService timeseriesAspectService; @@ -44,34 +47,55 @@ public DashboardUsageStatsResolver(TimeseriesAspectService timeseriesAspectServi } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); // Max number of aspects to return for absolute dashboard usage. 
final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - return CompletableFuture.supplyAsync(() -> { - DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); - // Time Bucket Stats - Filter bucketStatsFilter = createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); - List dailyUsageBuckets = getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); + // Time Bucket Stats + Filter bucketStatsFilter = + createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); + List dailyUsageBuckets = + getBuckets( + context.getOperationContext(), + bucketStatsFilter, + dashboardUrn, + timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + getAggregations( + context.getOperationContext(), + bucketStatsFilter, + dailyUsageBuckets, + timeseriesAspectService); - usageQueryResult.setBuckets(dailyUsageBuckets); - usageQueryResult.setAggregations(aggregations); + usageQueryResult.setBuckets(dailyUsageBuckets); + usageQueryResult.setAggregations(aggregations); - // Absolute usage metrics - List dashboardUsageMetrics = - getDashboardUsageMetrics(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); - usageQueryResult.setMetrics(dashboardUsageMetrics); - return usageQueryResult; - }); + // Absolute usage metrics + List dashboardUsageMetrics = + getDashboardUsageMetrics( + context, dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + usageQueryResult.setMetrics(dashboardUsageMetrics); + return usageQueryResult; + }, + this.getClass().getSimpleName(), + "get"); } - private List getDashboardUsageMetrics(String dashboardUrn, Long maybeStartTimeMillis, - Long 
maybeEndTimeMillis, Integer maybeLimit) { + private List getDashboardUsageMetrics( + @Nullable QueryContext context, + String dashboardUrn, + Long maybeStartTimeMillis, + Long maybeEndTimeMillis, + Integer maybeLimit) { List dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -79,16 +103,29 @@ private List getDashboardUsageMetrics(String dashboardUrn // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); List aspects = - timeseriesAspectService.getAspectValues(Urn.createFromString(dashboardUrn), Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit, + timeseriesAspectService.getAspectValues( + context.getOperationContext(), + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + dashboardUsageMetrics = + aspects.stream() + .map(a -> DashboardUsageMetricMapper.map(context, a)) + .collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 462c18ea33dd44..93c08d37c2e36d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregationMetrics; @@ -27,11 +28,13 @@ import com.linkedin.timeseries.GroupingBucket; import com.linkedin.timeseries.GroupingBucketType; import com.linkedin.timeseries.TimeWindowSize; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardUsageStatsUtils { @@ -41,6 +44,7 @@ public class DashboardUsageStatsUtils { public static final String ES_NULL_VALUE = "NULL"; public static List getDashboardUsageMetrics( + @Nullable QueryContext context, String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, @@ -49,15 +53,20 @@ public static List getDashboardUsageMetrics( List dashboardUsageMetrics; try { Filter filter = createUsageFilter(dashboardUrn, null, null, false); - List aspects = timeseriesAspectService.getAspectValues( - Urn.createFromString(dashboardUrn), - Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - filter); - dashboardUsageMetrics = 
aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + List aspects = + timeseriesAspectService.getAspectValues( + context.getOperationContext(), + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + filter); + dashboardUsageMetrics = + aspects.stream() + .map(m -> DashboardUsageMetricMapper.map(context, m)) + .collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } @@ -65,12 +74,15 @@ public static List getDashboardUsageMetrics( } public static DashboardUsageQueryResultAggregations getAggregations( + @Nonnull OperationContext opContext, Filter filter, List dailyUsageBuckets, TimeseriesAspectService timeseriesAspectService) { - List userUsageCounts = getUserUsageCounts(filter, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = new DashboardUsageQueryResultAggregations(); + List userUsageCounts = + getUserUsageCounts(opContext, filter, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + new DashboardUsageQueryResultAggregations(); aggregations.setUsers(userUsageCounts); aggregations.setUniqueUserCount(userUsageCounts.size()); @@ -99,29 +111,51 @@ public static DashboardUsageQueryResultAggregations getAggregations( } public static List getBuckets( + @Nonnull OperationContext opContext, Filter filter, String dashboardUrn, TimeseriesAspectService timeseriesAspectService) { AggregationSpec usersCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountAggregation = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("viewsCount"); AggregationSpec executionsCountAggregation = - 
new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("executionsCount"); AggregationSpec usersCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("viewsCount"); AggregationSpec executionsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{usersCountAggregation, viewsCountAggregation, executionsCountAggregation, - usersCountCardinalityAggregation, viewsCountCardinalityAggregation, executionsCountCardinalityAggregation}; - GenericTable dailyStats = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, - createUsageGroupingBuckets(CalendarInterval.DAY)); + new AggregationSpec[] { + usersCountAggregation, + viewsCountAggregation, + executionsCountAggregation, + usersCountCardinalityAggregation, + viewsCountCardinalityAggregation, + executionsCountCardinalityAggregation + }; + GenericTable dailyStats = + timeseriesAspectService.getAggregatedStats( + opContext, + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + createUsageGroupingBuckets(CalendarInterval.DAY)); List buckets = new ArrayList<>(); for 
(StringArray row : dailyStats.getRows()) { @@ -130,7 +164,8 @@ public static List getBuckets( usageAggregation.setDuration(WindowDuration.DAY); usageAggregation.setResource(dashboardUrn); - DashboardUsageAggregationMetrics usageAggregationMetrics = new DashboardUsageAggregationMetrics(); + DashboardUsageAggregationMetrics usageAggregationMetrics = + new DashboardUsageAggregationMetrics(); if (!row.get(1).equals(ES_NULL_VALUE) && !row.get(4).equals(ES_NULL_VALUE)) { try { @@ -156,7 +191,8 @@ public static List getBuckets( usageAggregationMetrics.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert executionsCount from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert executionsCount from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -165,34 +201,62 @@ public static List getBuckets( return buckets; } - public static List getUserUsageCounts(Filter filter, TimeseriesAspectService timeseriesAspectService) { + public static List getUserUsageCounts( + @Nonnull OperationContext opContext, + Filter filter, + TimeseriesAspectService timeseriesAspectService) { // Sum aggregation on userCounts.count AggregationSpec sumUsageCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.usageCount"); AggregationSpec sumViewCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.viewsCount"); AggregationSpec sumExecutionCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + 
.setFieldPath("userCounts.executionsCount"); AggregationSpec usageCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.usageCount"); AggregationSpec viewCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.viewsCount"); AggregationSpec executionCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY) + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) .setFieldPath("userCounts.executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{sumUsageCountsCountAggSpec, sumViewCountsCountAggSpec, sumExecutionCountsCountAggSpec, - usageCountsCardinalityAggSpec, viewCountsCardinalityAggSpec, executionCountsCardinalityAggSpec}; + new AggregationSpec[] { + sumUsageCountsCountAggSpec, + sumViewCountsCountAggSpec, + sumExecutionCountsCountAggSpec, + usageCountsCardinalityAggSpec, + viewCountsCardinalityAggSpec, + executionCountsCardinalityAggSpec + }; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend - GenericTable result = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, groupingBuckets); + GenericTable result = + 
timeseriesAspectService.getAggregatedStats( + opContext, + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -208,7 +272,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setUsageCount(Integer.valueOf(row.get(1))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if (!row.get(2).equals(ES_NULL_VALUE) && row.get(5).equals(ES_NULL_VALUE)) { @@ -217,7 +282,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setViewsCount(Integer.valueOf(row.get(2))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user views count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user views count from ES to int", e); } } if (!row.get(3).equals(ES_NULL_VALUE) && !row.get(6).equals(ES_NULL_VALUE)) { @@ -226,7 +292,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user executions count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user executions count from ES to int", e); } } userUsageCounts.add(userUsageCount); @@ -239,17 +306,15 @@ public static List getUserUsageCounts(Filter filter, T private static GroupingBucket[] createUsageGroupingBuckets(CalendarInterval calenderInterval) { GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) 
.setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(calenderInterval)); - return new GroupingBucket[]{timestampBucket}; + return new GroupingBucket[] {timestampBucket}; } public static Filter createUsageFilter( - String dashboardUrn, - Long startTime, - Long endTime, - boolean byBucket) { + String dashboardUrn, Long startTime, Long endTime, boolean byBucket) { Filter filter = new Filter(); final ArrayList criteria = new ArrayList<>(); @@ -260,44 +325,55 @@ public static Filter createUsageFilter( if (startTime != null) { // Add filter for start time - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(Long.toString(startTime)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(Long.toString(startTime)); criteria.add(startTimeCriterion); } if (endTime != null) { // Add filter for end time - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(Long.toString(endTime)); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(Long.toString(endTime)); criteria.add(endTimeCriterion); } if (byBucket) { - // Add filter for presence of eventGranularity - only consider bucket stats and not absolute stats + // Add filter for presence of eventGranularity - only consider bucket stats and not absolute + // stats // since unit is mandatory, we assume if eventGranularity contains unit, then it is not null Criterion onlyTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.CONTAIN).setValue("unit"); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.CONTAIN) + .setValue("unit"); criteria.add(onlyTimeBucketsCriterion); } else { // Add filter for 
absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); } - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } - public static Long timeMinusOneMonth(long time) { final long oneHourMillis = 60 * 60 * 1000; final long oneDayMillis = 24 * oneHourMillis; return time - (31 * oneDayMillis + 1); } - private DashboardUsageStatsUtils() { } + private DashboardUsageStatsUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/DataContractUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/DataContractUtils.java new file mode 100644 index 00000000000000..3dd7cd9df63838 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/DataContractUtils.java @@ -0,0 +1,32 @@ +package com.linkedin.datahub.graphql.resolvers.datacontract; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.metadata.authorization.PoliciesConfig; +import javax.annotation.Nonnull; + +public class DataContractUtils { + + public static boolean canEditDataContract(@Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup 
orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthorizationUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_ENTITY_DATA_CONTRACT_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + entityUrn.getEntityType(), + entityUrn.toString(), + orPrivilegeGroups); + } + + private DataContractUtils() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/EntityDataContractResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/EntityDataContractResolver.java new file mode 100644 index 00000000000000..338e7a0015b5a5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/EntityDataContractResolver.java @@ -0,0 +1,96 @@ +package com.linkedin.datahub.graphql.resolvers.datacontract; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.EntityRelationship; +import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.types.datacontract.DataContractMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; 
+ +@Slf4j +public class EntityDataContractResolver implements DataFetcher> { + static final String CONTRACT_FOR_RELATIONSHIP = "ContractFor"; + + private final EntityClient _entityClient; + private final GraphClient _graphClient; + + public EntityDataContractResolver( + final EntityClient entityClient, final GraphClient graphClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + _graphClient = Objects.requireNonNull(graphClient, "graphClient must not be null"); + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + + try { + // Step 1: Fetch the contract associated with the dataset. + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(CONTRACT_FOR_RELATIONSHIP), + RelationshipDirection.INCOMING, + 0, + 1, + context.getActorUrn()); + + // If we found multiple contracts for same entity, we have an invalid system state. Log + // a warning. + if (relationships.getTotal() > 1) { + // Someone created 2 contracts for the same entity. Currently, we do not handle this + // in the UI. + log.warn( + String.format( + "Unexpectedly found multiple contracts (%s) for entity with urn %s! 
This may lead to inconsistent behavior.", + relationships.getRelationships(), entityUrn)); + } + + final List contractUrns = + relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); + + if (!contractUrns.isEmpty()) { + final Urn contractUrn = contractUrns.get(0); + + // Step 2: Hydrate the contract entities based on the urns from step 1 + final EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + Constants.DATA_CONTRACT_ENTITY_NAME, + contractUrn, + null); + + if (entityResponse != null) { + // Step 4: Package and return result + return DataContractMapper.mapContract(entityResponse); + } + } + // No contract found + return null; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Data Contract from GMS", e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/UpsertDataContractResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/UpsertDataContractResolver.java new file mode 100644 index 00000000000000..955a4ed0ee6b2b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/datacontract/UpsertDataContractResolver.java @@ -0,0 +1,278 @@ +package com.linkedin.datahub.graphql.resolvers.datacontract; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datacontract.DataContractProperties; +import com.linkedin.datacontract.DataContractState; +import com.linkedin.datacontract.DataContractStatus; +import 
com.linkedin.datacontract.DataQualityContract; +import com.linkedin.datacontract.DataQualityContractArray; +import com.linkedin.datacontract.FreshnessContract; +import com.linkedin.datacontract.FreshnessContractArray; +import com.linkedin.datacontract.SchemaContract; +import com.linkedin.datacontract.SchemaContractArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.DataQualityContractInput; +import com.linkedin.datahub.graphql.generated.FreshnessContractInput; +import com.linkedin.datahub.graphql.generated.SchemaContractInput; +import com.linkedin.datahub.graphql.generated.UpsertDataContractInput; +import com.linkedin.datahub.graphql.types.datacontract.DataContractMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.key.DataContractKey; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class UpsertDataContractResolver implements DataFetcher> { + + private static final DataContractState DEFAULT_CONTRACT_STATE = DataContractState.ACTIVE; + private static final String CONTRACT_RELATIONSHIP_TYPE = 
"ContractFor"; + private final EntityClient _entityClient; + private final GraphClient _graphClient; + + public UpsertDataContractResolver( + final EntityClient entityClient, final GraphClient graphClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient cannot be null"); + _graphClient = Objects.requireNonNull(graphClient, "graphClient cannot be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final UpsertDataContractInput input = + bindArgument(environment.getArgument("input"), UpsertDataContractInput.class); + final Urn entityUrn = UrnUtils.getUrn(input.getEntityUrn()); + return CompletableFuture.supplyAsync( + () -> { + if (DataContractUtils.canEditDataContract(context, entityUrn)) { + + // Verify that the provided contract, dataset, assertions all exist as valid entities. + validateInput(entityUrn, input, context); + + // First determine if there is an existing data contract + final Urn maybeExistingContractUrn = + getEntityContractUrn(entityUrn, context.getAuthentication()); + + final DataContractProperties newProperties = mapInputToProperties(entityUrn, input); + final DataContractStatus newStatus = mapInputToStatus(input); + + final Urn urn = + maybeExistingContractUrn != null + ? maybeExistingContractUrn + : EntityKeyUtils.convertEntityKeyToUrn( + new DataContractKey() + .setId( + input.getId() != null + ? 
input.getId() + : UUID.randomUUID().toString()), + Constants.DATA_CONTRACT_ENTITY_NAME); + + final MetadataChangeProposal propertiesProposal = + buildMetadataChangeProposalWithUrn( + urn, Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, newProperties); + + final MetadataChangeProposal statusProposal = + buildMetadataChangeProposalWithUrn( + urn, Constants.DATA_CONTRACT_STATUS_ASPECT_NAME, newStatus); + + try { + _entityClient.batchIngestProposals( + context.getOperationContext(), + ImmutableList.of(propertiesProposal, statusProposal), + false); + + // Hydrate the contract entities based on the urns from step 1 + final EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + Constants.DATA_CONTRACT_ENTITY_NAME, + urn, + null); + + // Package and return result + return DataContractMapper.mapContract(entityResponse); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); + } + + private void validateInput( + @Nonnull final Urn entityUrn, + @Nonnull final UpsertDataContractInput input, + @Nonnull final QueryContext context) { + try { + + // Validate the target entity exists + if (!_entityClient.exists(context.getOperationContext(), entityUrn)) { + throw new DataHubGraphQLException( + String.format("Provided entity with urn %s does not exist!", entityUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + // Verify Freshness assertions + if (input.getFreshness() != null) { + final List freshnessInputs = input.getFreshness(); + for (FreshnessContractInput freshnessInput : freshnessInputs) { + final Urn assertionUrn = UrnUtils.getUrn(freshnessInput.getAssertionUrn()); + if (!_entityClient.exists(context.getOperationContext(), assertionUrn)) { + throw new DataHubGraphQLException( + String.format("Provided assertion with urn %s does not exist!", assertionUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } + } + + // Verify Schema assertions + if (input.getSchema() != null) { + final List schemaInputs = input.getSchema(); + for (SchemaContractInput schemaInput : schemaInputs) { + final Urn assertionUrn = UrnUtils.getUrn(schemaInput.getAssertionUrn()); + if (!_entityClient.exists(context.getOperationContext(), assertionUrn)) { + throw new DataHubGraphQLException( + String.format("Provided assertion with urn %s does not exist!", assertionUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } + } + + // Verify DQ assertions + if (input.getDataQuality() != null) { + final List dqInputs = input.getDataQuality(); + for (DataQualityContractInput dqInput : dqInputs) { + final Urn assertionUrn = UrnUtils.getUrn(dqInput.getAssertionUrn()); + if (!_entityClient.exists(context.getOperationContext(), assertionUrn)) { + throw new DataHubGraphQLException( + String.format("Provided assertion with urn %s does not exist!", assertionUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } + } + } catch 
(Exception e) { + if (e instanceof DataHubGraphQLException) { + throw (DataHubGraphQLException) e; + } else { + log.error( + "Failed to validate inputs provided when upserting data contract! Failing the create.", + e); + throw new DataHubGraphQLException( + "Failed to verify inputs. An unknown error occurred!", + DataHubGraphQLErrorCode.SERVER_ERROR); + } + } + } + + @Nullable + private Urn getEntityContractUrn(@Nonnull Urn entityUrn, @Nonnull Authentication authentication) { + EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn.toString(), + ImmutableList.of(CONTRACT_RELATIONSHIP_TYPE), + RelationshipDirection.INCOMING, + 0, + 1, + authentication.getActor().toUrnStr()); + + if (relationships.getTotal() > 1) { + // Bad state - There are multiple contracts for a single entity! Cannot update. + log.warn( + String.format( + "Unexpectedly found multiple contracts (%s) for entity with urn %s! This may lead to inconsistent behavior.", + relationships.getRelationships(), entityUrn)); + } + + if (relationships.getRelationships().size() == 1) { + return relationships.getRelationships().get(0).getEntity(); + } + // No Contract Found + return null; + } + + private DataContractProperties mapInputToProperties( + @Nonnull final Urn entityUrn, @Nonnull final UpsertDataContractInput input) { + final DataContractProperties result = new DataContractProperties(); + result.setEntity(entityUrn); + + // Construct the dataset contract. 
+ if (input.getFreshness() != null) { + result.setFreshness( + new FreshnessContractArray( + input.getFreshness().stream() + .map(this::mapFreshnessInput) + .collect(Collectors.toList()))); + } + + if (input.getSchema() != null) { + result.setSchema( + new SchemaContractArray( + input.getSchema().stream().map(this::mapSchemaInput).collect(Collectors.toList()))); + } + + if (input.getDataQuality() != null) { + result.setDataQuality( + new DataQualityContractArray( + input.getDataQuality().stream() + .map(this::mapDataQualityInput) + .collect(Collectors.toList()))); + } + + return result; + } + + private DataContractStatus mapInputToStatus(@Nonnull final UpsertDataContractInput input) { + final DataContractStatus result = new DataContractStatus(); + if (input.getState() != null) { + result.setState(DataContractState.valueOf(input.getState().toString())); + } else { + result.setState(DEFAULT_CONTRACT_STATE); + } + return result; + } + + private FreshnessContract mapFreshnessInput(@Nonnull final FreshnessContractInput input) { + final FreshnessContract result = new FreshnessContract(); + result.setAssertion(UrnUtils.getUrn(input.getAssertionUrn())); + return result; + } + + private SchemaContract mapSchemaInput(@Nonnull final SchemaContractInput input) { + final SchemaContract result = new SchemaContract(); + result.setAssertion(UrnUtils.getUrn(input.getAssertionUrn())); + return result; + } + + private DataQualityContract mapDataQualityInput(@Nonnull final DataQualityContractInput input) { + final DataQualityContract result = new DataQualityContract(); + result.setAssertion(UrnUtils.getUrn(input.getAssertionUrn())); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java index 9c32fa1c080762..f128b9d27f997d 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java @@ -1,22 +1,22 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchSetDataProductInput; import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -27,54 +27,82 @@ public class BatchSetDataProductResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDataProductInput input = bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); + final BatchSetDataProductInput input = + bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); final String maybeDataProductUrn = input.getDataProductUrn(); final List resources = input.getResourceUrns(); - return CompletableFuture.supplyAsync(() -> { - - verifyResources(resources, context); - 
verifyDataProduct(maybeDataProductUrn, context); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + verifyResources(resources, context); + verifyDataProduct(maybeDataProductUrn, context); - try { - List resourceUrns = resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (maybeDataProductUrn != null) { - batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); - } else { - batchUnsetDataProduct(resourceUrns, context); - } - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + List resourceUrns = + resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + if (maybeDataProductUrn != null) { + batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); + } else { + batchUnsetDataProduct(resourceUrns, context); + } + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } private void verifyResources(List resources, QueryContext context) { for (String resource : resources) { - if (!_dataProductService.verifyEntityExists(UrnUtils.getUrn(resource), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, %s in resources does not exist", resource)); + if (!_dataProductService.verifyEntityExists( + context.getOperationContext(), UrnUtils.getUrn(resource))) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, %s in resources does not exist", resource)); } Urn resourceUrn = UrnUtils.getUrn(resource); - if 
(!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity( + context, resourceUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } } private void verifyDataProduct(String maybeDataProductUrn, QueryContext context) { - if (maybeDataProductUrn != null && !_dataProductService.verifyEntityExists(UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, Data Product urn %s does not exist", maybeDataProductUrn)); + if (maybeDataProductUrn != null + && !_dataProductService.verifyEntityExists( + context.getOperationContext(), UrnUtils.getUrn(maybeDataProductUrn))) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, Data Product urn %s does not exist", + maybeDataProductUrn)); } } - private void batchSetDataProduct(@Nonnull String dataProductUrn, List resources, QueryContext context) { - log.debug("Batch setting Data Product. dataProduct urn: {}, resources: {}", dataProductUrn, resources); + private void batchSetDataProduct( + @Nonnull String dataProductUrn, List resources, QueryContext context) { + log.debug( + "Batch setting Data Product. 
dataProduct urn: {}, resources: {}", + dataProductUrn, + resources); try { - _dataProductService.batchSetDataProduct(UrnUtils.getUrn(dataProductUrn), resources, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.batchSetDataProduct( + context.getOperationContext(), + UrnUtils.getUrn(dataProductUrn), + resources, + UrnUtils.getUrn(context.getActorUrn())); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Data Product %s to resources with urns %s!", dataProductUrn, resources), e); + throw new RuntimeException( + String.format( + "Failed to batch set Data Product %s to resources with urns %s!", + dataProductUrn, resources), + e); } } @@ -82,10 +110,14 @@ private void batchUnsetDataProduct(List resources, QueryContext context) { log.debug("Batch unsetting Data Product. resources: {}", resources); try { for (Urn resource : resources) { - _dataProductService.unsetDataProduct(resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.unsetDataProduct( + context.getOperationContext(), resource, UrnUtils.getUrn(context.getActorUrn())); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch unset data product for resources with urns %s!", resources), e); + throw new RuntimeException( + String.format( + "Failed to batch unset data product for resources with urns %s!", resources), + e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index f644ff31a571b4..470267264f12f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ 
-1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateDataProductInput; import com.linkedin.datahub.graphql.generated.DataProduct; @@ -12,13 +15,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class CreateDataProductResolver implements DataFetcher> { @@ -26,37 +26,51 @@ public class CreateDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateDataProductInput input = bindArgument(environment.getArgument("input"), CreateDataProductInput.class); + final CreateDataProductInput input = + bindArgument(environment.getArgument("input"), CreateDataProductInput.class); final Authentication authentication = context.getAuthentication(); final Urn domainUrn = UrnUtils.getUrn(input.getDomainUrn()); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Domain provided dos not exist"); - } - 
if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - final Urn dataProductUrn = _dataProductService.createDataProduct( - input.getProperties().getName(), - input.getProperties().getDescription(), - authentication); - _dataProductService.setDomain(dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new DataProduct from input %s", input), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists(context.getOperationContext(), domainUrn)) { + throw new IllegalArgumentException("The Domain provided dos not exist"); + } + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + try { + final Urn dataProductUrn = + _dataProductService.createDataProduct( + context.getOperationContext(), + input.getId(), + input.getProperties().getName(), + input.getProperties().getDescription()); + _dataProductService.setDomain( + context.getOperationContext(), + dataProductUrn, + UrnUtils.getUrn(input.getDomainUrn())); + EntityResponse response = + _dataProductService.getDataProductEntityResponse( + context.getOperationContext(), dataProductUrn); + if (response != null) { + return DataProductMapper.map(context, response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new DataProduct from input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java index 596e292e7fe337..f6fe11a587a39b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java @@ -7,25 +7,27 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.authorization.PoliciesConfig; -import lombok.extern.slf4j.Slf4j; - import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DataProductAuthorizationUtils { - private DataProductAuthorizationUtils() { + private DataProductAuthorizationUtils() {} - } - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new 
ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDataProductsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -35,11 +37,14 @@ public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryCo orPrivilegeGroups); } - public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext context, Urn domainUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToManageDataProducts( + @Nonnull QueryContext context, Urn domainUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -49,10 +54,10 @@ public static boolean 
isAuthorizedToManageDataProducts(@Nonnull QueryContext con orPrivilegeGroups); } - public static boolean isAuthorizedToEditDataProduct(@Nonnull QueryContext context, Urn dataProductUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP - )); + public static boolean isAuthorizedToEditDataProduct( + @Nonnull QueryContext context, Urn dataProductUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(ALL_PRIVILEGES_GROUP)); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java index fd31e2199c22a1..25c4529abf3ce7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java @@ -4,16 +4,16 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.domain.Domains; import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class DeleteDataProductResolver implements DataFetcher> { @@ -21,32 +21,40 @@ public class DeleteDataProductResolver implements DataFetcher get(final 
DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } - - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - } - - try { - _dataProductService.deleteDataProduct(dataProductUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Data Product", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + context.getOperationContext(), dataProductUrn)) { + throw new IllegalArgumentException("The Data Product provided dos not exist"); + } + + Domains domains = + _dataProductService.getDataProductDomains( + context.getOperationContext(), dataProductUrn); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + } + + try { + _dataProductService.deleteDataProduct(context.getOperationContext(), dataProductUrn); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Data Product", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index e727ebe1858389..320d89cdec164a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -1,17 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import 
com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.dataproduct.DataProductAssociation; import com.linkedin.dataproduct.DataProductProperties; @@ -22,18 +26,14 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * Resolver responsible for getting the assets belonging to a Data Product. Get the assets from the @@ -41,7 +41,8 @@ */ @Slf4j @RequiredArgsConstructor -public class ListDataProductAssetsResolver implements DataFetcher> { +public class ListDataProductAssetsResolver + implements DataFetcher> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; @@ -52,7 +53,10 @@ public class ListDataProductAssetsResolver implements DataFetcher get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); // get urn from either input or source (in the case of "entities" field) - final String urn = environment.getArgument("urn") != null ? environment.getArgument("urn") : ((DataProduct) environment.getSource()).getUrn(); + final String urn = + environment.getArgument("urn") != null + ? 
environment.getArgument("urn") + : ((DataProduct) environment.getSource()).getUrn(); final Urn dataProductUrn = UrnUtils.getUrn(urn); final SearchAcrossEntitiesInput input = bindArgument(environment.getArgument("input"), SearchAcrossEntitiesInput.class); @@ -60,32 +64,52 @@ public CompletableFuture get(DataFetchingEnvironment environment) // 1. Get urns of assets belonging to Data Product using an aspect query List assetUrns = new ArrayList<>(); try { - final EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data(); + final EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)); + if (entityResponse != null + && entityResponse + .getAspects() + .containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + final DataMap data = + entityResponse + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); final DataProductProperties dataProductProperties = new DataProductProperties(data); if (dataProductProperties.hasAssets()) { - assetUrns.addAll(dataProductProperties.getAssets().stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList())); + assetUrns.addAll( + dataProductProperties.getAssets().stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList())); } } } catch (Exception e) { log.error(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); - throw new 
RuntimeException(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to list data product assets with urn %s", dataProductUrn), e); } // 2. Get list of entities that we should query based on filters or assets from aspect. - List entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).collect(Collectors.toList()); - - - final List inputEntityTypes = (input.getTypes() == null || input.getTypes().isEmpty()) ? ImmutableList.of() : input.getTypes(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - final List finalEntityNames = inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; + List entitiesToQuery = + assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); + + final List inputEntityTypes = + (input.getTypes() == null || input.getTypes().isEmpty()) + ? ImmutableList.of() + : input.getTypes(); + final List inputEntityNames = + inputEntityTypes.stream() + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + + final List finalEntityNames = + inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); @@ -93,49 +117,73 @@ public CompletableFuture get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? 
input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - // if no assets in data product properties, exit early before search and return empty results - if (assetUrns.size() == 0) { - SearchResults results = new SearchResults(); - results.setStart(start); - results.setCount(count); - results.setTotal(0); - results.setSearchResults(ImmutableList.of()); - return results; - } - - // add urns from the aspect to our filters - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } - - try { - log.debug( - "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - finalEntityNames, - sanitizedQuery, - finalFilter, - start, - count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + // if no assets in data product properties, exit early before search and return empty + // results + if (assetUrns.size() == 0) { + SearchResults results = new SearchResults(); + 
results.setStart(start); + results.setCount(count); + results.setTotal(0); + results.setSearchResults(ImmutableList.of()); + return results; + } + + // add urns from the aspect to our filters + final Filter baseFilter = + ResolverUtils.buildFilter( + input.getFilters(), + input.getOrFilters(), + context.getOperationContext().getAspectRetriever()); + final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); + + final SearchFlags searchFlags; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); + } else { + searchFlags = null; + } + + try { + log.debug( + "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + context, + _entityClient.searchAcrossEntities( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : flags), + finalEntityNames, + sanitizedQuery, + finalFilter, + start, + count, + null)); + } catch (Exception e) { + log.error( + "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java index 79afddbb873fbc..e49cdcfe94eb49 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.UpdateDataProductInput; @@ -13,13 +16,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class UpdateDataProductResolver implements DataFetcher> { @@ -27,43 +27,57 @@ public class UpdateDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateDataProductInput input = bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); + final UpdateDataProductInput input = + bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + context.getOperationContext(), dataProductUrn)) { + throw new IllegalArgumentException("The Data Product provided dos not exist"); + } - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - } + Domains domains = + _dataProductService.getDataProductDomains( + context.getOperationContext(), dataProductUrn); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + } - try { - final Urn urn = _dataProductService.updateDataProduct( - dataProductUrn, - input.getName(), - input.getDescription(), - authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); - } - }); + try { + final Urn urn = + _dataProductService.updateDataProduct( + context.getOperationContext(), + dataProductUrn, + input.getName(), + input.getDescription()); + EntityResponse response = + _dataProductService.getDataProductEntityResponse( + context.getOperationContext(), urn); + if (response != null) { + return DataProductMapper.map(context, response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java index 1587df4c9899b3..f38cf80f36ceb9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java @@ -7,6 +7,7 @@ import com.linkedin.data.template.StringArray; import com.linkedin.data.template.StringArrayArray; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.Health; import com.linkedin.datahub.graphql.generated.HealthStatus; @@ -28,24 +29,24 @@ import com.linkedin.timeseries.GroupingBucketType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.AllArgsConstructor; import lombok.Data; import lombok.extern.slf4j.Slf4j; - /** * Resolver used for resolving the Health state of a Dataset. * - * Currently, the health status is calculated via the validation on a Dataset. If there are no validations found, the - * health status will be undefined for the Dataset. - * + *

Currently, the health status is calculated via the validation on a Dataset. If there are no + * validations found, the health status will be undefined for the Dataset. */ @Slf4j public class DatasetHealthResolver implements DataFetcher>> { @@ -60,47 +61,50 @@ public class DatasetHealthResolver implements DataFetcher _statusCache; public DatasetHealthResolver( - final GraphClient graphClient, - final TimeseriesAspectService timeseriesAspectService) { + final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService) { this(graphClient, timeseriesAspectService, new Config(true)); - } + public DatasetHealthResolver( final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService, final Config config) { _graphClient = graphClient; _timeseriesAspectService = timeseriesAspectService; - _statusCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(1, TimeUnit.MINUTES) - .build(); + _statusCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(1, TimeUnit.MINUTES).build(); _config = config; } @Override - public CompletableFuture> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { final Dataset parent = environment.getSource(); - return CompletableFuture.supplyAsync(() -> { - try { - final CachedHealth cachedStatus = _statusCache.get(parent.getUrn(), () -> ( - computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); - return cachedStatus.healths; - } catch (Exception e) { - throw new RuntimeException("Failed to resolve dataset's health status.", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final CachedHealth cachedStatus = + _statusCache.get( + parent.getUrn(), + () -> + (computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); + return cachedStatus.healths; + } catch (Exception e) { + throw new 
RuntimeException("Failed to resolve dataset's health status.", e); + } + }, + this.getClass().getSimpleName(), + "get"); } /** * Computes the "resolved health status" for a Dataset by * - * - fetching active (non-deleted) assertions - * - fetching latest assertion run for each - * - checking whether any of the assertions latest runs are failing - * + *

- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing */ - private CachedHealth computeHealthStatusForDataset(final String datasetUrn, final QueryContext context) { + private CachedHealth computeHealthStatusForDataset( + final String datasetUrn, final QueryContext context) { final List healthStatuses = new ArrayList<>(); if (_config.getAssertionsEnabled()) { @@ -113,61 +117,68 @@ private CachedHealth computeHealthStatusForDataset(final String datasetUrn, fina } /** - * Returns the resolved "assertions health", which is currently a static function of whether the most recent run of - * all dataset assertions has succeeded. + * Returns the resolved "assertions health", which is currently a static function of whether the + * most recent run of all dataset assertions has succeeded. * * @param datasetUrn the dataset to compute health for * @param context the query context * @return an instance of {@link Health} for the Dataset, null if one cannot be computed. */ @Nullable - private Health computeAssertionHealthForDataset(final String datasetUrn, final QueryContext context) { + private Health computeAssertionHealthForDataset( + final String datasetUrn, final QueryContext context) { // Get active assertion urns - final EntityRelationships relationships = _graphClient.getRelatedEntities( - datasetUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - 0, - 500, - context.getActorUrn() - ); + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + datasetUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); if (relationships.getTotal() > 0) { // If there are assertions defined, then we should return a non-null health for this asset. 
- final Set activeAssertionUrns = relationships.getRelationships() - .stream() - .map(relationship -> relationship.getEntity().toString()).collect(Collectors.toSet()); + final Set activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); - final GenericTable assertionRunResults = getAssertionRunsTable(datasetUrn); + final GenericTable assertionRunResults = + getAssertionRunsTable(context.getOperationContext(), datasetUrn); if (!assertionRunResults.hasRows() || assertionRunResults.getRows().size() == 0) { // No assertion run results found. Return empty health! return null; } - final List failingAssertionUrns = getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + final List failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); // Finally compute & return the health. final Health health = new Health(); health.setType(HealthStatusType.ASSERTIONS); if (failingAssertionUrns.size() > 0) { health.setStatus(HealthStatus.FAIL); - health.setMessage(String.format("%s of %s assertions are failing", failingAssertionUrns.size(), - activeAssertionUrns.size())); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); health.setCauses(failingAssertionUrns); } else { health.setStatus(HealthStatus.PASS); health.setMessage("All assertions are passing"); } return health; - } return null; } - private GenericTable getAssertionRunsTable(final String asserteeUrn) { + private GenericTable getAssertionRunsTable( + @Nonnull OperationContext opContext, final String asserteeUrn) { return _timeseriesAspectService.getAggregatedStats( + opContext, Constants.ASSERTION_ENTITY_NAME, Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, createAssertionAggregationSpecs(), @@ -175,7 +186,8 @@ private GenericTable getAssertionRunsTable(final String asserteeUrn) { 
createAssertionGroupingBuckets()); } - private List getFailingAssertionUrns(final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { + private List getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { // Create the buckets based on the result return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); } @@ -191,12 +203,15 @@ private Filter createAssertionsFilter(final String datasetUrn) { // Add filter for result == result Criterion startTimeCriterion = - new Criterion().setField("status").setCondition(Condition.EQUAL).setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); criteria.add(startTimeCriterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } @@ -205,31 +220,38 @@ private AggregationSpec[] createAssertionAggregationSpecs() { AggregationSpec resultTypeAggregation = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); AggregationSpec timestampAggregation = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("timestampMillis"); - return new AggregationSpec[]{resultTypeAggregation, timestampAggregation}; + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; } private GroupingBucket[] createAssertionGroupingBuckets() { // String grouping bucket on "assertionUrn" GroupingBucket assertionUrnBucket = new GroupingBucket(); 
assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - return new GroupingBucket[]{assertionUrnBucket}; + return new GroupingBucket[] {assertionUrnBucket}; } - private List resultToFailedAssertionUrns(final StringArrayArray rows, final Set activeAssertionUrns) { + private List resultToFailedAssertionUrns( + final StringArrayArray rows, final Set activeAssertionUrns) { final List failedAssertionUrns = new ArrayList<>(); for (StringArray row : rows) { // Result structure should be assertionUrn, event.result.type, timestampMillis if (row.size() != 3) { - throw new RuntimeException(String.format( - "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", row.size())); + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", + row.size())); } final String assertionUrn = row.get(0); final String resultType = row.get(1); - // If assertion is "active" (not deleted) & is failing, then we report a degradation in health. - if (activeAssertionUrns.contains(assertionUrn) && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. 
+ if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { failedAssertionUrns.add(assertionUrn); } } @@ -246,4 +268,4 @@ public static class Config { private static class CachedHealth { private final List healths; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java index 2873866bb34f73..7d3603ec050e94 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java @@ -1,92 +1,89 @@ package com.linkedin.datahub.graphql.resolvers.dataset; -import com.datahub.authorization.EntitySpec; -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.metadata.authorization.PoliciesConfig; -import com.linkedin.usage.UsageClient; +import com.linkedin.metadata.client.UsageStatsJavaClient; import com.linkedin.usage.UsageTimeRange; import com.linkedin.usage.UserUsageCounts; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.concurrent.CompletableFuture; -import java.util.concurrent.TimeUnit; import 
java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - /** * This resolver is a thin wrapper around the {@link DatasetUsageStatsResolver} which simply * computes some aggregate usage metrics for a Dashboard. */ @Slf4j -public class DatasetStatsSummaryResolver implements DataFetcher> { +public class DatasetStatsSummaryResolver + implements DataFetcher> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; - private final UsageClient usageClient; - private final Cache summaryCache; + private final UsageStatsJavaClient usageClient; - public DatasetStatsSummaryResolver(final UsageClient usageClient) { + public DatasetStatsSummaryResolver(final UsageStatsJavaClient usageClient) { this.usageClient = usageClient; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) // TODO: Make caching duration configurable externally. - .build(); } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - - try { - - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view profile information for dataset {}", + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + if (!AuthorizationUtils.isViewDatasetUsageAuthorized(context, resourceUrn)) { + log.debug( + "User {} is not authorized to view profile information for dataset {}", context.getActorUrn(), resourceUrn.toString()); - return null; - } - - com.linkedin.usage.UsageQueryResult - usageQueryResult = 
usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); - - final DatasetStatsSummary result = new DatasetStatsSummary(); - result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); - result.setUniqueUserCountLast30Days(usageQueryResult.getAggregations().getUniqueUserCount()); - if (usageQueryResult.getAggregations().hasUsers()) { - result.setTopUsersLast30Days(trimUsers(usageQueryResult.getAggregations().getUsers() - .stream() - .filter(UserUsageCounts::hasUser) - .sorted((a, b) -> (b.getCount() - a.getCount())) - .map(userCounts -> createPartialUser(Objects.requireNonNull(userCounts.getUser()))) - .collect(Collectors.toList()))); - } - this.summaryCache.put(resourceUrn, result); - return result; - } catch (Exception e) { - log.error(String.format("Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return null; + } + + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats( + context.getOperationContext(), resourceUrn.toString(), UsageTimeRange.MONTH); + + final DatasetStatsSummary result = new DatasetStatsSummary(); + result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); + result.setUniqueUserCountLast30Days( + usageQueryResult.getAggregations().getUniqueUserCount()); + if (usageQueryResult.getAggregations().hasUsers()) { + result.setTopUsersLast30Days( + trimUsers( + usageQueryResult.getAggregations().getUsers().stream() + .filter(UserUsageCounts::hasUser) + .sorted((a, b) -> (b.getCount() - a.getCount())) + .map( + userCounts -> + createPartialUser(Objects.requireNonNull(userCounts.getUser()))) + .collect(Collectors.toList()))); + } + + return result; + } catch (Exception e) { + log.error( + String.format( + "Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), + e); + return null; // Do not throw when loading usage 
summary fails. + } + }, + this.getClass().getSimpleName(), + "get"); } private List trimUsers(final List originalUsers) { @@ -101,10 +98,4 @@ private CorpUser createPartialUser(final Urn userUrn) { result.setUrn(userUrn.toString()); return result; } - - private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index e4bec8e896fdf7..bed866db0fb3dc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -1,23 +1,22 @@ package com.linkedin.datahub.graphql.resolvers.dataset; -import com.datahub.authorization.EntitySpec; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.isViewDatasetUsageAuthorized; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.UsageQueryResult; import com.linkedin.datahub.graphql.types.usage.UsageQueryResultMapper; -import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.usage.UsageClient; import com.linkedin.usage.UsageTimeRange; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DatasetUsageStatsResolver implements DataFetcher> { @@ -28,31 +27,34 @@ public DatasetUsageStatsResolver(final UsageClient usageClient) { } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final UsageTimeRange range = UsageTimeRange.valueOf(environment.getArgument("range")); - return CompletableFuture.supplyAsync(() -> { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view usage information for dataset {}", - context.getActorUrn(), - resourceUrn.toString()); - return null; - } - try { - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range); - return UsageQueryResultMapper.map(usageQueryResult); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); - } - }); - } - - private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!isViewDatasetUsageAuthorized(context, resourceUrn)) { + log.debug( + "User {} is not authorized to view usage information for dataset {}", + context.getActorUrn(), + resourceUrn.toString()); + return null; + } + try { + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats( + context.getOperationContext(), resourceUrn.toString(), range); + 
return UsageQueryResultMapper.map(context, usageQueryResult); + } catch (Exception e) { + log.error(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + MetricUtils.counter(this.getClass(), "usage_stats_dropped").inc(); + } + + return UsageQueryResultMapper.EMPTY; + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/IsAssignedToMeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/IsAssignedToMeResolver.java new file mode 100644 index 00000000000000..16a321d64f74eb --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/IsAssignedToMeResolver.java @@ -0,0 +1,43 @@ +package com.linkedin.datahub.graphql.resolvers.dataset; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.Role; +import com.linkedin.datahub.graphql.generated.RoleUser; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IsAssignedToMeResolver implements DataFetcher> { + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final Role role = environment.getSource(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Set assignedUserUrns = + role.getActors() != null && role.getActors().getUsers() != null + ? 
role.getActors().getUsers().stream() + .map(RoleUser::getUser) + .map(CorpUser::getUrn) + .collect(Collectors.toSet()) + : Collections.emptySet(); + return assignedUserUrns.contains(context.getActorUrn()); + } catch (Exception e) { + throw new RuntimeException( + "Failed to determine if current user is assigned to Role", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 75c09d0cf7e437..c568ff6db3a27d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -1,16 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput; -import 
com.linkedin.datahub.graphql.resolvers.AuthUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -18,18 +23,16 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor @@ -37,48 +40,64 @@ public class UpdateDeprecationResolver implements DataFetcher + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateDeprecationInput input = bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); + final UpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); final Urn entityUrn = Urn.createFromString(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - validateUpdateDeprecationInput( - entityUrn, - _entityService - ); - try { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DEPRECATION_ASPECT_NAME, - _entityService, - new Deprecation()); - updateDeprecation(deprecation, input, context); - - // Create the Deprecation aspect - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(entityUrn, DEPRECATION_ASPECT_NAME, deprecation); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to update Deprecation for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to update Deprecation for resource with entity urn %s", entityUrn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!isAuthorizedToUpdateDeprecationForEntity(context, entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateUpdateDeprecationInput(context.getOperationContext(), entityUrn, _entityService); + try { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + entityUrn.toString(), + DEPRECATION_ASPECT_NAME, + _entityService, + new Deprecation()); + updateDeprecation(deprecation, input, context); + + // Create the Deprecation aspect + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + entityUrn, DEPRECATION_ASPECT_NAME, deprecation); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + return true; + } catch (Exception e) { + log.error( + "Failed to update Deprecation for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to update Deprecation for resource with entity urn %s", entityUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext context, final Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToUpdateDeprecationForEntity( + final QueryContext context, final Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -89,19 +108,19 @@ private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext cont } public static Boolean validateUpdateDeprecationInput( - Urn entityUrn, - EntityService 
entityService - ) { + @Nonnull OperationContext opContext, Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(opContext, entityUrn, true)) { throw new IllegalArgumentException( - String.format("Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); + String.format( + "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); } return true; } - private static void updateDeprecation(Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { + private static void updateDeprecation( + Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { deprecation.setDeprecated(input.getDeprecated()); deprecation.setDecommissionTime(input.getDecommissionTime(), SetMode.REMOVE_IF_NULL); if (input.getNote() != null) { @@ -115,9 +134,10 @@ private static void updateDeprecation(Deprecation deprecation, UpdateDeprecation } catch (URISyntaxException e) { // Should never happen. throw new RuntimeException( - String.format("Failed to convert authorized actor into an Urn. actor urn: %s", - context.getActorUrn()), + String.format( + "Failed to convert authorized actor into an Urn. 
actor urn: %s", + context.getActorUrn()), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java index 1930cdc1f86676..ec2b0346288268 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java @@ -1,17 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.CreateDomainInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.domain.DomainProperties; @@ -23,22 +27,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import 
java.util.UUID; import java.util.concurrent.CompletableFuture; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS privilege. + * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS + * privilege. */ @Slf4j @RequiredArgsConstructor @@ -51,71 +48,97 @@ public class CreateDomainResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateDomainInput input = bindArgument(environment.getArgument("input"), CreateDomainInput.class); - final Urn parentDomain = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canCreateDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - // Create the Domain Key - final DomainKey key = new DomainKey(); - - // Take user provided id OR generate a random UUID for the domain. - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setId(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Domain already exists!"); - } - - if (parentDomain != null && !_entityClient.exists(parentDomain, context.getAuthentication())) { - throw new IllegalArgumentException("Parent Domain does not exist!"); - } - - if (DomainUtils.hasNameConflict(input.getName(), parentDomain, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, DOMAIN_ENTITY_NAME, - DOMAIN_PROPERTIES_ASPECT_NAME, mapDomainProperties(input, context)); - proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); - - String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - OwnerUtils.addCreatorAsOwner(context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return domainUrn; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to create Domain with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Domain with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + final CreateDomainInput input = + bindArgument(environment.getArgument("input"), CreateDomainInput.class); + final Urn parentDomain = + input.getParentDomain() != null ? 
UrnUtils.getUrn(input.getParentDomain()) : null; + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final DomainKey key = new DomainKey(); + + // Take user provided id OR generate a random UUID for the domain. + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setId(id); + + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME))) { + throw new IllegalArgumentException("This Domain already exists!"); + } + + if (parentDomain != null + && !_entityClient.exists(context.getOperationContext(), parentDomain)) { + throw new IllegalArgumentException("Parent Domain does not exist!"); + } + + if (DomainUtils.hasNameConflict( + input.getName(), parentDomain, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in this domain. 
Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + DOMAIN_ENTITY_NAME, + DOMAIN_PROPERTIES_ASPECT_NAME, + mapDomainProperties(input, context)); + proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); + + String domainUrn = + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + OwnerUtils.addCreatorAsOwner( + context, domainUrn, OwnerEntityType.CORP_USER, _entityService); + return domainUrn; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to create Domain with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Domain with id: %s, name: %s", + input.getId(), input.getName()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private DomainProperties mapDomainProperties(final CreateDomainInput input, final QueryContext context) { + private DomainProperties mapDomainProperties( + final CreateDomainInput input, final QueryContext context) { final DomainProperties result = new DomainProperties(); result.setName(input.getName()); result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - result.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + result.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); if (input.getParentDomain() != null) { try { result.setParentDomain(Urn.createFromString(input.getParentDomain())); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), e); + throw new RuntimeException( + String.format("Failed to create Domain Urn from string: %s", 
input.getParentDomain()), + e); } } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java index 9ab90e8b4ff72c..eddb21303a7ee2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java @@ -3,6 +3,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; @@ -11,10 +12,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class DeleteDomainResolver implements DataFetcher> { @@ -25,37 +23,51 @@ public DeleteDomainResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String domainUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(domainUrn); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (AuthorizationUtils.canManageDomains(context) + || AuthorizationUtils.canDeleteEntity(urn, 
context)) { + try { + // Make sure there are no child domains + if (DomainUtils.hasChildDomains(urn, context, _entityClient)) { + throw new RuntimeException( + String.format("Cannot delete domain %s which has child domains", domainUrn)); + } - if (AuthorizationUtils.canManageDomains(context) || AuthorizationUtils.canDeleteEntity(urn, context)) { - try { - // Make sure there are no child domains - if (DomainUtils.hasChildDomains(urn, context, _entityClient)) { - throw new RuntimeException(String.format("Cannot delete domain %s which has child domains", domainUrn)); - } + _entityClient.deleteEntity(context.getOperationContext(), urn); + log.info( + String.format("I've successfully deleted the entity %s with urn", domainUrn)); - _entityClient.deleteEntity(urn, context.getAuthentication()); - log.info(String.format("I've successfully deleted the entity %s with urn", domainUrn)); + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(context.getOperationContext(), urn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Domain with urn %s", + urn), + e); + } + }); - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for Domain with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against domain with urn %s", domainUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", domainUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 0bf551c4683e61..6a880503802cb4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -1,10 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; @@ -15,17 +19,12 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static 
com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolves the entities in a particular Domain. - */ +/** Resolves the entities in a particular Domain. */ @Slf4j public class DomainEntitiesResolver implements DataFetcher> { @@ -49,50 +48,67 @@ public DomainEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Domain) environment.getSource()).getUrn(); - final DomainEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final DomainEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : DEFAULT_QUERY; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? 
input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - try { - - final CriterionArray criteria = new CriterionArray(); - final Criterion filterCriterion = new Criterion() - .setField(DOMAINS_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - criteria.add(filterCriterion); - if (input.getFilters() != null) { - input.getFilters().forEach(filter -> { - criteria.add(new Criterion().setField(filter.getField()).setValue(filter.getValue())); - }); - } - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - query, - new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(criteria))), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with Domain with urn %s", urn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + + final CriterionArray criteria = new CriterionArray(); + final Criterion filterCriterion = + new Criterion() + .setField(DOMAINS_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + criteria.add(filterCriterion); + if (input.getFilters() != null) { + input + .getFilters() + .forEach( + filter -> { + criteria.add( + criterionFromFilter( + filter, true, context.getOperationContext().getAspectRetriever())); + }); + } + + return UrnSearchResultsMapper.map( + context, + _entityClient.searchAcrossEntities( + context.getOperationContext(), + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(criteria))), + start, + count, + Collections.emptyList(), + null)); + + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed 
to resolve entities associated with Domain with urn %s", urn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 3a751e502eb10a..e6d4238bc70546 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ListDomainsInput; @@ -10,7 +14,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -18,18 +21,15 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS + * platform privilege. */ public class ListDomainsResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -43,47 +43,58 @@ public ListDomainsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - final ListDomainsInput input = bindArgument(environment.getArgument("input"), ListDomainsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final Urn parentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final ListDomainsInput input = + bindArgument(environment.getArgument("input"), ListDomainsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final Urn parentDomainUrn = + input.getParentDomain() != null ? 
UrnUtils.getUrn(input.getParentDomain()) : null; + final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); - try { - // First, get all domain Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.DOMAIN_ENTITY_NAME, - query, - filter, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all domain Urns. + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.DOMAIN_ENTITY_NAME, + query, + filter, + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + start, + count); - // Now that we have entities we can bind this to a result. - final ListDomainsResult result = new ListDomainsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setDomains(mapUnresolvedDomains(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list domains", e); - } - }); + // Now that we have entities we can bind this to a result. 
+ final ListDomainsResult result = new ListDomainsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setDomains( + mapUnresolvedDomains( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list domains", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - // This method maps urns returned from the list endpoint into Partial Domain objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Domain objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedDomains(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java index dcaa7d61ed90cd..d02bcedd0eb1ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java @@ -1,59 +1,79 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canViewRelationship; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ParentDomainsResult; import 
com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; - -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import java.util.stream.Collectors; public class ParentDomainsResolver implements DataFetcher> { - private final EntityClient _entityClient; + private final EntityClient _entityClient; + + public ParentDomainsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } - public ParentDomainsResolver(final EntityClient entityClient) { - _entityClient = entityClient; + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); + final List parentDomains = new ArrayList<>(); + final Set visitedParentUrns = new HashSet<>(); + + if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { + throw new IllegalArgumentException( + String.format("Failed to resolve parents for entity type %s", urn)); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - final List parentDomains = new ArrayList<>(); - final Set visitedParentUrns = new HashSet<>(); - - if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { - throw new IllegalArgumentException(String.format("Failed to resolve parents for entity type %s", urn)); - } - - return CompletableFuture.supplyAsync(() -> { - try { - Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); - - while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { - 
parentDomains.add(parentDomain); - visitedParentUrns.add(parentDomain.getUrn()); - parentDomain = DomainUtils.getParentDomain(Urn.createFromString(parentDomain.getUrn()), context, _entityClient); - } - - final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load parent domains for entity %s", urn), e); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); + + while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { + parentDomains.add(parentDomain); + visitedParentUrns.add(parentDomain.getUrn()); + parentDomain = + DomainUtils.getParentDomain( + Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } - }); - } + + List viewable = + parentDomains.stream() + .filter( + e -> + context == null + || canViewRelationship( + context.getOperationContext(), + UrnUtils.getUrn(e.getUrn()), + urn)) + .collect(Collectors.toList()); + + final ParentDomainsResult result = new ParentDomainsResult(); + result.setCount(viewable.size()); + result.setDomains(viewable); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load parent domains for entity %s", urn), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 56a76dcb1e07fa..6ada447ca59ee8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -1,8 +1,12 @@ package 
com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.domain.Domains; @@ -12,23 +16,23 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor public class SetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,49 +41,66 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); final Urn domainUrn = Urn.createFromString(environment.getArgument("domainUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateSetDomainInput( - entityUrn, - domainUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - setDomain(domains, domainUrn); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateSetDomainInput( + context.getOperationContext(), entityUrn, domainUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + entityUrn.toString(), + DOMAINS_ASPECT_NAME, + _entityService, + new Domains()); + setDomain(domains, domainUrn); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set Domain to resource with entity urn {}, domain urn {}: {}", entityUrn, domainUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set Domain to resource with entity urn %s, domain urn %s", entityUrn, domainUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + return true; + } catch (Exception e) { + log.error( + "Failed to set Domain to resource with entity urn {}, domain urn {}: {}", + entityUrn, + domainUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to set Domain to resource with entity urn %s, domain urn %s", + entityUrn, domainUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } public static Boolean validateSetDomainInput( + @Nonnull OperationContext opContext, Urn entityUrn, Urn domainUrn, - EntityService entityService - ) { + EntityService entityService) { - if (!entityService.exists(domainUrn)) { + if (!entityService.exists(opContext, domainUrn, true)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. 
Domain does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Domain does not exist.", + entityUrn, domainUrn)); } - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(opContext, entityUrn, true)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Entity does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Entity does not exist.", + entityUrn, domainUrn)); } return true; @@ -90,4 +111,4 @@ private static void setDomain(Domains domains, Urn domainUrn) { newDomain.add(domainUrn); domains.setDomains(newDomain); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index 01dd4f1254f8eb..783cf250a7ca61 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.domain.Domains; @@ -12,24 +16,23 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import 
java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class UnsetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,41 +40,49 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - validateUnsetDomainInput( - entityUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - unsetDomain(domains); + validateUnsetDomainInput(context.getOperationContext(), entityUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + entityUrn.toString(), + DOMAINS_ASPECT_NAME, + _entityService, + new Domains()); + unsetDomain(domains); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to unset Domains for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + return true; + } catch (Exception e) { + log.error( + "Failed to unset Domains for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } public static Boolean validateUnsetDomainInput( - Urn entityUrn, - EntityService entityService - ) { + @Nonnull OperationContext opContext, Urn entityUrn, EntityService entityService) { - if (!entityService.exists(entityUrn)) { + if (!entityService.exists(opContext, entityUrn, true)) { throw new IllegalArgumentException( 
String.format("Failed to add Entity %s to Domain %s. Entity does not exist.", entityUrn)); } @@ -85,4 +96,4 @@ private static void unsetDomain(@Nonnull Domains domains) { } domains.getDomains().clear(); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index dbaf6000477aa1..2a8944456d0c8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -1,11 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateEmbedInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils; @@ -14,19 +19,13 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for updating the embed render URL for an asset. - */ +/** Resolver used for updating the embed render URL for an asset. */ @Slf4j @RequiredArgsConstructor public class UpdateEmbedResolver implements DataFetcher> { @@ -37,62 +36,79 @@ public class UpdateEmbedResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateEmbedInput input = bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); + final UpdateEmbedInput input = + bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); final Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + validateUpdateEmbedInput(context.getOperationContext(), input, _entityService); + try { + final Embed embed = + (Embed) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + entityUrn.toString(), + EMBED_ASPECT_NAME, + _entityService, + new Embed()); - if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - validateUpdateEmbedInput( - input, - _entityService - ); - try { - final Embed embed = (Embed) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - EMBED_ASPECT_NAME, - _entityService, - new Embed()); + updateEmbed(embed, input); - updateEmbed(embed, input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed); - _entityService.ingestProposal( - proposal, - new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()), - false - ); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Embed for to resource with entity urn %s", entityUrn), e); - } - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed); + _entityService.ingestProposal( + context.getOperationContext(), + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis()), + false); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update Embed for to resource with entity urn %s", entityUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } /** - * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link IllegalArgumentException} if the input - * is not valid. + * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link + * IllegalArgumentException} if the input is not valid. * - * For an input to be valid, the target URN must exist. + *

For an input to be valid, the target URN must exist. * * @param input the input to validate * @param entityService an instance of {@link EntityService} used to validate the input. */ - private static void validateUpdateEmbedInput(@Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { - if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { + private static void validateUpdateEmbedInput( + @Nonnull OperationContext opContext, + @Nonnull final UpdateEmbedInput input, + @Nonnull final EntityService entityService) { + if (!entityService.exists(opContext, UrnUtils.getUrn(input.getUrn()), true)) { throw new IllegalArgumentException( - String.format("Failed to update embed for entity with urn %s. Entity does not exist!", input.getUrn())); + String.format( + "Failed to update embed for entity with urn %s. Entity does not exist!", + input.getUrn())); } } /** * Applies an instance of {@link UpdateEmbedInput} to a base instance of {@link Embed}. + * * @param embed an embed to update * @param input the updates to apply */ - private static void updateEmbed(@Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { + private static void updateEmbed( + @Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { embed.setRenderUrl(input.getRenderUrl(), SetMode.IGNORE_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index 613f97182c5dda..57570b431dd712 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -1,28 +1,30 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.Objects; +import java.util.Set; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for returning whether an entity exists. - */ +/** Resolver responsible for returning whether an entity exists. */ public class EntityExistsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; - public EntityExistsResolver(final EntityService entityService) { + public EntityExistsResolver(final EntityService entityService) { _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); String entityUrnString = bindArgument(environment.getArgument("urn"), String.class); // resolver can be used as its own endpoint or when hydrating an entity if (entityUrnString == null && environment.getSource() != null) { @@ -31,12 +33,18 @@ public CompletableFuture get(final DataFetchingEnvironment environment) Objects.requireNonNull(entityUrnString, "Entity urn must not be null!"); final Urn entityUrn = Urn.createFromString(entityUrnString); - return CompletableFuture.supplyAsync(() -> { - try { - return _entityService.exists(entityUrn); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to check whether entity %s exists", entityUrn.toString())); - } - }); + return 
GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return _entityService + .exists(context.getOperationContext(), Set.of(entityUrn)) + .contains(entityUrn); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to check whether entity %s exists", entityUrn.toString())); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index d8190a160f268d..b25f5598b44bc0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -1,25 +1,27 @@ package com.linkedin.datahub.graphql.resolvers.entity; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; + +import com.datahub.authorization.AuthUtil; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPrivileges; -import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.extern.slf4j.Slf4j; - -import java.util.Collections; +import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityPrivilegesResolver implements DataFetcher> { @@ -36,29 +38,37 @@ public CompletableFuture get(DataFetchingEnvironment environme final String urnString = ((Entity) environment.getSource()).getUrn(); final Urn urn = UrnUtils.getUrn(urnString); - return CompletableFuture.supplyAsync(() -> { - switch (urn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return getGlossaryTermPrivileges(urn, context); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return getGlossaryNodePrivileges(urn, context); - case Constants.DATASET_ENTITY_NAME: - return getDatasetPrivileges(urn, context); - case Constants.CHART_ENTITY_NAME: - return getChartPrivileges(urn, context); - case Constants.DASHBOARD_ENTITY_NAME: - return getDashboardPrivileges(urn, context); - case Constants.DATA_JOB_ENTITY_NAME: - return getDataJobPrivileges(urn, context); - default: - log.warn("Tried to get entity privileges for entity type {} but nothing is implemented for it yet", urn.getEntityType()); - return new EntityPrivileges(); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + switch (urn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return getGlossaryTermPrivileges(urn, context); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return getGlossaryNodePrivileges(urn, context); + case Constants.DATASET_ENTITY_NAME: + return getDatasetPrivileges(urn, context); + case Constants.CHART_ENTITY_NAME: + return getChartPrivileges(urn, context); + case Constants.DASHBOARD_ENTITY_NAME: + return getDashboardPrivileges(urn, context); + case Constants.DATA_JOB_ENTITY_NAME: + 
return getDataJobPrivileges(urn, context); + default: + log.warn( + "Tried to get entity privileges for entity type {}. Adding common privileges only.", + urn.getEntityType()); + EntityPrivileges commonPrivileges = new EntityPrivileges(); + addCommonPrivileges(commonPrivileges, urn, context); + return commonPrivileges; + } + }, + this.getClass().getSimpleName(), + "get"); } private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); + addCommonPrivileges(result, termUrn, context); result.setCanManageEntity(false); if (GlossaryUtils.canManageGlossaries(context)) { result.setCanManageEntity(true); @@ -66,7 +76,8 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con } Urn parentNodeUrn = GlossaryUtils.getParentUrn(termUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; @@ -74,65 +85,62 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); + addCommonPrivileges(result, nodeUrn, context); result.setCanManageEntity(false); if (GlossaryUtils.canManageGlossaries(context)) { result.setCanManageEntity(true); result.setCanManageChildren(true); return result; } - Boolean canManageChildren = GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); + Boolean canManageChildren = + GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); result.setCanManageChildren(canManageChildren); Urn parentNodeUrn = GlossaryUtils.getParentUrn(nodeUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean 
canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup orPrivilegesGroup = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - urn.getEntityType(), - urn.toString(), - orPrivilegesGroup); + return AuthUtil.isAuthorizedUrns( + context.getAuthorizer(), context.getActorUrn(), LINEAGE, UPDATE, List.of(urn)); } private EntityPrivileges getDatasetPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context)); result.setCanEditQueries(AuthorizationUtils.canCreateQuery(ImmutableList.of(urn), context)); + addCommonPrivileges(result, urn, context); return result; } private EntityPrivileges getChartPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context)); + addCommonPrivileges(result, urn, context); return result; } private EntityPrivileges getDashboardPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); 
result.setCanEditEmbed(EmbedUtils.isAuthorizedToUpdateEmbedForEntity(urn, context)); + addCommonPrivileges(result, urn, context); return result; } private EntityPrivileges getDataJobPrivileges(Urn urn, QueryContext context) { final EntityPrivileges result = new EntityPrivileges(); - result.setCanEditLineage(canEditEntityLineage(urn, context)); + addCommonPrivileges(result, urn, context); return result; } + + private void addCommonPrivileges( + @Nonnull EntityPrivileges result, @Nonnull Urn urn, @Nonnull QueryContext context) { + result.setCanEditLineage(canEditEntityLineage(urn, context)); + result.setCanEditProperties(AuthorizationUtils.canEditProperties(urn, context)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java new file mode 100644 index 00000000000000..03282934293ea5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchAssignFormResolver.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class BatchAssignFormResolver implements DataFetcher> { + + private final FormService _formService; 
+ + public BatchAssignFormResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final BatchAssignFormInput input = + bindArgument(environment.getArgument("input"), BatchAssignFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final List entityUrns = input.getEntityUrns(); + final Authentication authentication = context.getAuthentication(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _formService.batchAssignFormToEntities( + context.getOperationContext(), + entityUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + formUrn); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolver.java new file mode 100644 index 00000000000000..ac2a5fc1ec0ea6 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolver.java @@ -0,0 +1,57 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import 
com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class BatchRemoveFormResolver implements DataFetcher> { + + private final FormService _formService; + + public BatchRemoveFormResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final BatchAssignFormInput input = + bindArgument(environment.getArgument("input"), BatchAssignFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final List entityUrns = input.getEntityUrns(); + final Authentication authentication = context.getAuthentication(); + + // TODO: (PRD-1062) Add permission check once permission exists + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _formService.batchUnassignFormForEntities( + context.getOperationContext(), + entityUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + formUrn); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java new file mode 100644 index 00000000000000..3cf4d9175d45bf --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateDynamicFormAssignmentResolver.java @@ -0,0 
+1,54 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class CreateDynamicFormAssignmentResolver + implements DataFetcher> { + + private final FormService _formService; + + public CreateDynamicFormAssignmentResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final CreateDynamicFormAssignmentInput input = + bindArgument(environment.getArgument("input"), CreateDynamicFormAssignmentInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final DynamicFormAssignment formAssignment = + FormUtils.mapDynamicFormAssignment( + input, context.getOperationContext().getAspectRetriever()); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _formService.createDynamicFormAssignment( + context.getOperationContext(), formAssignment, formUrn); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateFormResolver.java new file mode 100644 index 00000000000000..e9962464059e6f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/CreateFormResolver.java @@ -0,0 +1,83 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.CreateFormInput; +import com.linkedin.datahub.graphql.generated.CreatePromptInput; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.datahub.graphql.types.form.FormMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.FormInfo; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class CreateFormResolver implements DataFetcher> { + + private final EntityClient _entityClient; + private final FormService _formService; + + public CreateFormResolver( + @Nonnull final EntityClient entityClient, @Nonnull final FormService formService) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + _formService = 
Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture

get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + + final CreateFormInput input = + bindArgument(environment.getArgument("input"), CreateFormInput.class); + final FormInfo formInfo = FormUtils.mapFormInfo(input); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (!AuthorizationUtils.canManageForms(context)) { + throw new AuthorizationException("Unable to create form. Please contact your admin."); + } + validatePrompts(input.getPrompts()); + + Urn formUrn = + _formService.createForm(context.getOperationContext(), formInfo, input.getId()); + EntityResponse response = + _entityClient.getV2( + context.getOperationContext(), Constants.FORM_ENTITY_NAME, formUrn, null); + return FormMapper.map(context, response); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } + + private void validatePrompts(@Nullable List prompts) { + if (prompts == null) { + return; + } + prompts.forEach( + prompt -> { + if (prompt.getType().equals(FormPromptType.STRUCTURED_PROPERTY) + || prompt.getType().equals(FormPromptType.FIELDS_STRUCTURED_PROPERTY)) { + if (prompt.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Provided prompt with type STRUCTURED_PROPERTY or FIELDS_STRUCTURED_PROPERTY and no structured property params"); + } + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/DeleteFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/DeleteFormResolver.java new file mode 100644 index 00000000000000..eec6816042a402 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/DeleteFormResolver.java @@ -0,0 +1,65 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.DeleteFormInput; +import com.linkedin.entity.client.EntityClient; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class DeleteFormResolver implements DataFetcher> { + + private final EntityClient _entityClient; + + public DeleteFormResolver(@Nonnull final EntityClient entityClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final DeleteFormInput input = + bindArgument(environment.getArgument("input"), DeleteFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getUrn()); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (!AuthorizationUtils.canManageForms(context)) { + throw new AuthorizationException("Unable to delete form. 
Please contact your admin."); + } + _entityClient.deleteEntity(context.getOperationContext(), formUrn); + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(context.getOperationContext(), formUrn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Form with urn %s", + formUrn), + e); + } + }); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java new file mode 100644 index 00000000000000..29e5cd55f15e24 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolver.java @@ -0,0 +1,83 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class IsFormAssignedToMeResolver implements DataFetcher> { + + private final GroupService 
_groupService; + + public IsFormAssignedToMeResolver(@Nonnull final GroupService groupService) { + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final FormActorAssignment parent = environment.getSource(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + + // Assign urn and group urns + final Set assignedUserUrns = + parent.getUsers() != null + ? parent.getUsers().stream().map(CorpUser::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Set assignedGroupUrns = + parent.getGroups() != null + ? parent.getGroups().stream().map(CorpGroup::getUrn).collect(Collectors.toSet()) + : Collections.emptySet(); + + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + + // First check whether user is directly assigned. + if (assignedUserUrns.size() > 0) { + boolean isUserAssigned = assignedUserUrns.contains(userUrn.toString()); + if (isUserAssigned) { + return true; + } + } + + // Next check whether the user is assigned indirectly, by group. + if (assignedGroupUrns.size() > 0) { + final List groupUrns = + _groupService.getGroupsForUser(context.getOperationContext(), userUrn); + boolean isUserGroupAssigned = + groupUrns.stream() + .anyMatch(groupUrn -> assignedGroupUrns.contains(groupUrn.toString())); + if (isUserGroupAssigned) { + return true; + } + } + } catch (Exception e) { + log.error( + "Failed to determine whether the form is assigned to the currently authenticated user! Returning false.", + e); + } + + // Else the user is not directly assigned. 
+ return false; + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java new file mode 100644 index 00000000000000..9528bb125473e7 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/SubmitFormPromptResolver.java @@ -0,0 +1,92 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.metadata.service.FormService; +import com.linkedin.structured.PrimitivePropertyValueArray; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class SubmitFormPromptResolver implements DataFetcher> { + + private final FormService _formService; + + public SubmitFormPromptResolver(@Nonnull final FormService formService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final Urn entityUrn = UrnUtils.getUrn(environment.getArgument("urn")); + final SubmitFormPromptInput input = + bindArgument(environment.getArgument("input"), SubmitFormPromptInput.class); 
+ final String promptId = input.getPromptId(); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final String fieldPath = input.getFieldPath(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + if (input.getType().equals(FormPromptType.STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitStructuredPropertyPromptResponse( + context.getOperationContext(), + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId); + } else if (input.getType().equals(FormPromptType.FIELDS_STRUCTURED_PROPERTY)) { + if (input.getStructuredPropertyParams() == null) { + throw new IllegalArgumentException( + "Failed to provide structured property params for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + if (fieldPath == null) { + throw new IllegalArgumentException( + "Failed to provide fieldPath for prompt type FIELDS_STRUCTURED_PROPERTY"); + } + final Urn structuredPropertyUrn = + UrnUtils.getUrn(input.getStructuredPropertyParams().getStructuredPropertyUrn()); + final PrimitivePropertyValueArray values = + FormUtils.getStructuredPropertyValuesFromInput(input); + + return _formService.submitFieldStructuredPropertyPromptResponse( + context.getOperationContext(), + entityUrn, + structuredPropertyUrn, + values, + formUrn, + promptId, + fieldPath); + } + return false; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/UpdateFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/UpdateFormResolver.java new file mode 100644 index 00000000000000..8b4d1debcd4db9 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/UpdateFormResolver.java @@ -0,0 +1,98 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.UpdateFormInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.FormUtils; +import com.linkedin.datahub.graphql.types.form.FormMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.FormType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.patch.builder.FormInfoPatchBuilder; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class UpdateFormResolver implements DataFetcher> { + + private final EntityClient _entityClient; + + public UpdateFormResolver(@Nonnull final EntityClient entityClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + + final UpdateFormInput input = + 
bindArgument(environment.getArgument("input"), UpdateFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getUrn()); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (!AuthorizationUtils.canManageForms(context)) { + throw new AuthorizationException("Unable to update form. Please contact your admin."); + } + if (!_entityClient.exists(context.getOperationContext(), formUrn)) { + throw new IllegalArgumentException( + String.format("Form with urn %s does not exist", formUrn)); + } + + FormInfoPatchBuilder patchBuilder = new FormInfoPatchBuilder().urn(formUrn); + if (input.getName() != null) { + patchBuilder.setName(input.getName()); + } + if (input.getDescription() != null) { + patchBuilder.setDescription(input.getDescription()); + } + if (input.getType() != null) { + patchBuilder.setType(FormType.valueOf(input.getType().toString())); + } + if (input.getPromptsToAdd() != null) { + patchBuilder.addPrompts(FormUtils.mapPromptsToAdd(input.getPromptsToAdd())); + } + if (input.getPromptsToRemove() != null) { + patchBuilder.removePrompts(input.getPromptsToRemove()); + } + if (input.getActors() != null) { + if (input.getActors().getOwners() != null) { + patchBuilder.setOwnershipForm(input.getActors().getOwners()); + } + if (input.getActors().getUsersToAdd() != null) { + input.getActors().getUsersToAdd().forEach(patchBuilder::addAssignedUser); + } + if (input.getActors().getUsersToRemove() != null) { + input.getActors().getUsersToRemove().forEach(patchBuilder::removeAssignedUser); + } + if (input.getActors().getGroupsToAdd() != null) { + input.getActors().getGroupsToAdd().forEach(patchBuilder::addAssignedGroup); + } + if (input.getActors().getGroupsToRemove() != null) { + input.getActors().getGroupsToRemove().forEach(patchBuilder::removeAssignedGroup); + } + } + _entityClient.ingestProposal( + context.getOperationContext(), patchBuilder.build(), false); + + EntityResponse response = + _entityClient.getV2( + context.getOperationContext(), 
Constants.FORM_ENTITY_NAME, formUrn, null); + return FormMapper.map(context, response); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java new file mode 100644 index 00000000000000..76103d77fb32c5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolver.java @@ -0,0 +1,66 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class VerifyFormResolver implements DataFetcher> { + + private final FormService _formService; + private final GroupService _groupService; + + public VerifyFormResolver( + @Nonnull final FormService formService, @Nonnull final GroupService groupService) { + _formService = Objects.requireNonNull(formService, "formService must not be null"); + _groupService = Objects.requireNonNull(groupService, "groupService must not be null"); + } + + @Override + public CompletableFuture 
get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final VerifyFormInput input = + bindArgument(environment.getArgument("input"), VerifyFormInput.class); + final Urn formUrn = UrnUtils.getUrn(input.getFormUrn()); + final Urn entityUrn = UrnUtils.getUrn(input.getEntityUrn()); + final Authentication authentication = context.getAuthentication(); + final Urn actorUrn = UrnUtils.getUrn(authentication.getActor().toUrnStr()); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final List groupsForUser = + _groupService.getGroupsForUser(context.getOperationContext(), actorUrn); + if (!_formService.isFormAssignedToUser( + context.getOperationContext(), formUrn, entityUrn, actorUrn, groupsForUser)) { + throw new AuthorizationException( + String.format( + "Failed to authorize form on entity as form with urn %s is not assigned to user", + formUrn)); + } + _formService.verifyFormForEntity(context.getOperationContext(), formUrn, entityUrn); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 69b5b14edfbeeb..1e99ea120354ee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -1,105 +1,144 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + 
import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor public class AddRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; + private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - - return 
CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - validateRelatedTermsInput(urn, termUrns); - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - glossaryRelatedTerms = new GlossaryRelatedTerms(); - } - - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final Urn urn = Urn.createFromString(input.getUrn()); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Urn parentUrn = GlossaryUtils.getParentUrn(urn, context, _entityClient); + if (GlossaryUtils.canManageChildrenEntities(context, parentUrn, _entityClient)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + validateRelatedTermsInput(context.getOperationContext(), urn, termUrns); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + glossaryRelatedTerms = new GlossaryRelatedTerms(); + } + + if 
(relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); + + return updateRelatedTerms( + context.getOperationContext(), + termUrns, + existingTermUrns, + urn, + glossaryRelatedTerms, + actor); + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); + + return updateRelatedTerms( + context.getOperationContext(), + termUrns, + existingTermUrns, + urn, + glossaryRelatedTerms, + actor); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add related terms to %s", input.getUrn()), e); } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add related terms to %s", input.getUrn()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } - public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + public Boolean validateRelatedTermsInput( + @Nonnull OperationContext opContext, Urn urn, List termUrns) { + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(opContext, urn, true)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); } for (Urn termUrn : termUrns) { if (termUrn.equals(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. Tried to create related term with itself.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. Tried to create related term with itself.", urn)); } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); - } else if (!_entityService.exists(termUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); + } else if (!_entityService.exists(opContext, termUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. 
%s does not exist.", urn, termUrn)); } } return true; } - private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray existingTermUrns, Urn urn, GlossaryRelatedTerms glossaryRelatedTerms, Urn actor) { + private Boolean updateRelatedTerms( + @Nonnull OperationContext opContext, + List termUrns, + GlossaryTermUrnArray existingTermUrns, + Urn urn, + GlossaryRelatedTerms glossaryRelatedTerms, + Urn actor) { List termsToAdd = new ArrayList<>(); for (Urn termUrn : termUrns) { if (existingTermUrns.stream().anyMatch(association -> association.equals(termUrn))) { @@ -117,7 +156,13 @@ private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray exis existingTermUrns.add(newUrn); } - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + persistAspect( + opContext, + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); return true; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index cc0ab4e03a4e8d..75239ae8e7eeb6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -1,14 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.client.EntityClient; @@ -19,18 +23,11 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -41,43 +38,59 @@ public class CreateGlossaryNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - try { - final GlossaryNodeKey key = new GlossaryNodeKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Node already exists!"); - } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, mapGlossaryNodeInfo(input)); - - String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + try { + final GlossaryNodeKey key = new GlossaryNodeKey(); + + final String id = + input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME))) { + throw new IllegalArgumentException("This Glossary Node already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_NODE_ENTITY_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + mapGlossaryNodeInfo(input)); + + String glossaryNodeUrn = + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + + OwnerUtils.addCreatorAsOwner( + context, glossaryNodeUrn, OwnerEntityType.CORP_USER, _entityService); + return glossaryNodeUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryNode with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNode with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - OwnerUtils.addCreatorAsOwner(context, glossaryNodeUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryNodeUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryNode with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryNode with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput input) { @@ -90,10 +103,12 @@ private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index ad69e0c5876e2c..d524a07b541621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -1,15 +1,19 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import 
com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.EntityResponse; @@ -25,9 +29,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.Collections; import java.util.HashMap; @@ -37,12 +38,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -57,42 +54,61 @@ public class CreateGlossaryTermResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? 
UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - // Ensure there isn't another glossary term with the same name at this level of the glossary - validateGlossaryTermName(parentNode, context, input.getName()); - try { - final GlossaryTermKey key = new GlossaryTermKey(); - - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Term already exists!"); - } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, mapGlossaryTermInfo(input)); - - String glossaryTermUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + // Ensure there isn't another glossary term with the same name at this level of the + // glossary + validateGlossaryTermName(parentNode, context, input.getName()); + try { + final GlossaryTermKey key = new GlossaryTermKey(); + + final String id = + input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME))) { + throw new IllegalArgumentException("This Glossary Term already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_TERM_ENTITY_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + mapGlossaryTermInfo(input)); + + String glossaryTermUrn = + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + + OwnerUtils.addCreatorAsOwner( + context, glossaryTermUrn, OwnerEntityType.CORP_USER, _entityService); + return glossaryTermUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryTerm with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryTerm with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - OwnerUtils.addCreatorAsOwner(context, glossaryTermUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryTermUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryTerm with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryTerm with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput input) { @@ -106,7 +122,10 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; @@ -114,31 +133,28 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp private Filter buildParentNodeFilter(final Urn parentNodeUrn) { final Map criterionMap = new HashMap<>(); - criterionMap.put(PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); + criterionMap.put( + PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? 
null : parentNodeUrn.toString()); return QueryUtils.newFilter(criterionMap); } private Map getTermsWithSameParent(Urn parentNode, QueryContext context) { try { final Filter filter = buildParentNodeFilter(parentNode); - final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + _entityClient.filter( + context.getOperationContext(), GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000); - final List termUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List termUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); return _entityClient.batchGetV2( + context.getOperationContext(), GLOSSARY_TERM_ENTITY_NAME, new HashSet<>(termUrns), - Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), - context.getAuthentication()); + Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)); } catch (Exception e) { throw new RuntimeException("Failed fetching Glossary Terms with the same parent", e); } @@ -147,14 +163,17 @@ private Map getTermsWithSameParent(Urn parentNode, QueryCon private void validateGlossaryTermName(Urn parentNode, QueryContext context, String name) { Map entities = getTermsWithSameParent(parentNode, context); - entities.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); - GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); - if (termInfo.hasName() && termInfo.getName().equals(name)) { - throw new IllegalArgumentException("Glossary Term with this name already exists at this level of the Business Glossary"); - } - } - }); + entities.forEach( + (urn, entityResponse) -> { + if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + 
DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); + if (termInfo.hasName() && termInfo.getName().equals(name)) { + throw new IllegalArgumentException( + "Glossary Term with this name already exists at this level of the Business Glossary"); + } + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index 0929c7138528da..26f0c61de1b0f9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -2,6 +2,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; @@ -11,50 +12,62 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DeleteGlossaryEntityResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; + private final EntityService _entityService; - public DeleteGlossaryEntityResolver(final EntityClient entityClient, EntityService entityService) { + public DeleteGlossaryEntityResolver( + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture 
get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("urn")); final Urn parentNodeUrn = GlossaryUtils.getParentUrn(entityUrn, context, _entityClient); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { - throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); - } - - try { - _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + if (!_entityService.exists(context.getOperationContext(), entityUrn, true)) { + throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); + } - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { try { - _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + _entityClient.deleteEntity(context.getOperationContext(), entityUrn); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + context.getOperationContext(), entityUrn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for glossary entity with urn %s", + entityUrn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for glossary entity with urn %s", entityUrn), e); + throw new RuntimeException( + String.format( + "Failed to perform delete against glossary entity with urn %s", entityUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new 
RuntimeException(String.format("Failed to perform delete against glossary entity with urn %s", entityUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index 1457a308c8774f..451abfdaf1c063 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GetRootGlossaryEntitiesInput; import com.linkedin.datahub.graphql.generated.GetRootGlossaryNodesResult; @@ -20,15 +23,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryNodesResolver implements DataFetcher> { +public class 
GetRootGlossaryNodesResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -37,56 +38,60 @@ public GetRootGlossaryNodesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsNodesResult = _entityClient.filter( - Constants.GLOSSARY_NODE_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); - - final List glossaryNodeUrns = gmsNodesResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); - - final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); - result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); - result.setCount(glossaryNodeUrns.size()); - result.setStart(gmsNodesResult.getFrom()); - result.setTotal(gmsNodesResult.getNumEntities()); - - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsNodesResult = + _entityClient.filter( + context.getOperationContext(), + Constants.GLOSSARY_NODE_ENTITY_NAME, + filter, 
+ null, + start, + count); + + final List glossaryNodeUrns = + gmsNodesResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); + result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); + result.setCount(glossaryNodeUrns.size()); + result.setStart(gmsNodesResult.getFrom()); + result.setTotal(gmsNodesResult.getNumEntities()); + + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } @@ -101,4 +106,3 @@ private List mapUnresolvedGlossaryNodes(final List entityUrns return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index f7684e477f8307..7ca79b168819e6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -1,8 +1,11 @@ package 
com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GetRootGlossaryEntitiesInput; import com.linkedin.datahub.graphql.generated.GetRootGlossaryTermsResult; @@ -20,15 +23,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryTermsResolver implements DataFetcher> { +public class GetRootGlossaryTermsResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -37,56 +38,60 @@ public GetRootGlossaryTermsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); - try { - final 
Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsTermsResult = _entityClient.filter( - Constants.GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsTermsResult = + _entityClient.filter( + context.getOperationContext(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + filter, + null, + start, + count); - final List glossaryTermUrns = gmsTermsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List glossaryTermUrns = + gmsTermsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); - result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); - result.setCount(glossaryTermUrns.size()); - result.setStart(gmsTermsResult.getFrom()); - result.setTotal(gmsTermsResult.getNumEntities()); + final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); + result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); + result.setCount(glossaryTermUrns.size()); + result.setStart(gmsTermsResult.getFrom()); + result.setTotal(gmsTermsResult.getNumEntities()); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); - } - }); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + 
.setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index d513d70f39f589..92c8aa7fd2d133 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,8 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canViewRelationship; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; + import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.GlossaryNode; @@ -14,18 +21,14 @@ import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; 
-import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; - -public class ParentNodesResolver implements DataFetcher> { +public class ParentNodesResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -36,21 +39,28 @@ public ParentNodesResolver(final EntityClient entityClient) { private void aggregateParentNodes(List nodes, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(dataMap); if (nodeInfo.hasParentNode()) { Urn parentNodeUrn = nodeInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + context.getOperationContext(), + parentNodeUrn.getEntityType(), + parentNodeUrn, + null); if (response != null) { - GlossaryNode mappedNode = GlossaryNodeMapper.map(response); + GlossaryNode mappedNode = GlossaryNodeMapper.map(context, response); nodes.add(mappedNode); 
aggregateParentNodes(nodes, mappedNode.getUrn(), context); } @@ -64,21 +74,28 @@ private void aggregateParentNodes(List nodes, String urn, QueryCon private GlossaryNode getTermParentNode(String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); if (termInfo.hasParentNode()) { Urn parentNodeUrn = termInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + context.getOperationContext(), + parentNodeUrn.getEntityType(), + parentNodeUrn, + null); if (response != null) { - GlossaryNode mappedNode = GlossaryNodeMapper.map(response); + GlossaryNode mappedNode = GlossaryNodeMapper.map(context, response); return mappedNode; } } @@ -95,27 +112,41 @@ public CompletableFuture get(DataFetchingEnvironment environm final String urn = ((Entity) environment.getSource()).getUrn(); final List nodes = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - final String type = Urn.createFromString(urn).getEntityType(); + return 
GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final String type = Urn.createFromString(urn).getEntityType(); - if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { - final GlossaryNode parentNode = getTermParentNode(urn, context); - if (parentNode != null) { - nodes.add(parentNode); - aggregateParentNodes(nodes, parentNode.getUrn(), context); - } - } else { - aggregateParentNodes(nodes, urn, context); - } + if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { + final GlossaryNode parentNode = getTermParentNode(urn, context); + if (parentNode != null) { + nodes.add(parentNode); + aggregateParentNodes(nodes, parentNode.getUrn(), context); + } + } else { + aggregateParentNodes(nodes, urn, context); + } - final ParentNodesResult result = new ParentNodesResult(); - result.setCount(nodes.size()); - result.setNodes(nodes); - return result; - } catch (DataHubGraphQLException | URISyntaxException e) { - throw new RuntimeException(("Failed to load parent nodes")); - } - }); + List viewable = + nodes.stream() + .filter( + e -> + context == null + || canViewRelationship( + context.getOperationContext(), + UrnUtils.getUrn(e.getUrn()), + UrnUtils.getUrn(urn))) + .collect(Collectors.toList()); + + final ParentNodesResult result = new ParentNodesResult(); + result.setCount(viewable.size()); + result.setNodes(viewable); + return result; + } catch (DataHubGraphQLException | URISyntaxException e) { + throw new RuntimeException(("Failed to load parent nodes")); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 417ef4292d0f7d..59f820d7cbd36e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -1,90 +1,123 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor public class RemoveRelatedTermsResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; + private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext 
context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final Urn urn = Urn.createFromString(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List termUrnsToRemove = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Urn parentUrn = GlossaryUtils.getParentUrn(urn, context, _entityClient); + if (GlossaryUtils.canManageChildrenEntities(context, parentUrn, _entityClient)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final List termUrnsToRemove = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); - } + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(context.getOperationContext(), urn, true)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. 
%s either does not exist or is not a glossaryTerm.", + urn, urn)); + } - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - throw new RuntimeException(String.format("Related Terms for this Urn do not exist: %s", urn)); - } + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + throw new RuntimeException( + String.format("Related Terms for this Urn do not exist: %s", urn)); + } - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final 
GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + context.getOperationContext(), + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + context.getOperationContext(), + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to removes related terms from %s", input.getUrn()), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to removes related terms from %s", input.getUrn()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java index daff0962bc2e81..4a78547421518c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -7,6 +10,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -17,13 +21,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver that adds a set of native members to a group, if the user and group both exist. - */ +/** Resolver that adds a set of native members to a group, if the user and group both exist. 
*/ public class AddGroupMembersResolver implements DataFetcher> { private final GroupService _groupService; @@ -33,9 +31,11 @@ public AddGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { - final AddGroupMembersInput input = bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); + final AddGroupMembersInput input = + bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -46,36 +46,48 @@ public CompletableFuture get(final DataFetchingEnvironment environment) "Unauthorized to perform this action. Please contact your DataHub administrator."); } - if (!_groupService.groupExists(groupUrn)) { + if (!_groupService.groupExists(context.getOperationContext(), groupUrn)) { // The group doesn't exist. throw new DataHubGraphQLException( String.format("Failed to add members to group %s. 
Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership for group %s when adding group members", groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually added to it", - groupUrnStr)); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + Origin groupOrigin = + _groupService.getGroupOrigin(context.getOperationContext(), groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + context.getOperationContext(), groupUrn, context.getActorUrn()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership for group %s when adding group members", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually added to it", + groupUrnStr)); + } - try { - // Add each user to the group - final List userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - userUrnList.forEach(userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add group members to group %s", groupUrnStr)); - } - }); + try { + // Add each user to the group + final List userUrnList = + 
input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + userUrnList.forEach( + userUrn -> + _groupService.addUserToNativeGroup( + context.getOperationContext(), userUrn, groupUrn)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add group members to group %s", groupUrnStr)); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index 75f2a61287ecc4..7116d99bd0aba0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateGroupInput; import com.linkedin.metadata.key.CorpGroupKey; @@ -12,10 +15,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -// Currently, this resolver will override the group details, but not group membership, if a group with the same name already exists. +// Currently, this resolver will override the group details, but not group membership, if a group +// with the same name already exists. 
public class CreateGroupResolver implements DataFetcher> { private final GroupService _groupService; @@ -33,19 +34,25 @@ public CompletableFuture get(final DataFetchingEnvironment environment) throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - final CreateGroupInput input = bindArgument(environment.getArgument("input"), CreateGroupInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, check if the group already exists. - // Create the Group key. - final CorpGroupKey key = new CorpGroupKey(); - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". - return _groupService.createNativeGroup(key, input.getName(), input.getDescription(), authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create group", e); - } - }); + final CreateGroupInput input = + bindArgument(environment.getArgument("input"), CreateGroupInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, check if the group already exists. + // Create the Group key. + final CorpGroupKey key = new CorpGroupKey(); + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + final String description = input.getDescription() != null ? input.getDescription() : ""; + key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". 
+ return _groupService.createNativeGroup( + context.getOperationContext(), key, input.getName(), description); + } catch (Exception e) { + throw new RuntimeException("Failed to create group", e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index d0874b21fb1062..ac195ca5d82520 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -1,11 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; import com.linkedin.datahub.graphql.generated.EntityCountResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.service.ViewService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import io.opentelemetry.extension.annotations.WithSpan; @@ -14,44 +18,58 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class EntityCountsResolver implements DataFetcher> { private final EntityClient _entityClient; - public EntityCountsResolver(final EntityClient entityClient) { + private final ViewService 
_viewService; + + public EntityCountsResolver(final EntityClient entityClient, final ViewService viewService) { _entityClient = entityClient; + _viewService = viewService; } @Override @WithSpan - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final EntityCountInput input = bindArgument(environment.getArgument("input"), EntityCountInput.class); - final EntityCountResults results = new EntityCountResults(); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all counts - Map gmsResult = _entityClient.batchGetTotalEntityCount( - input.getTypes().stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), context.getAuthentication()); - - // bind to a result. - List resultList = gmsResult.entrySet().stream().map(entry -> { - EntityCountResult result = new EntityCountResult(); - result.setCount(Math.toIntExact(entry.getValue())); - result.setEntityType(EntityTypeMapper.getType(entry.getKey())); - return result; - }).collect(Collectors.toList()); - results.setCounts(resultList); - return results; - } catch (Exception e) { - throw new RuntimeException("Failed to get entity counts", e); - } - }); + final EntityCountInput input = + bindArgument(environment.getArgument("input"), EntityCountInput.class); + final EntityCountResults results = new EntityCountResults(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, get all counts + Map gmsResult = + _entityClient.batchGetTotalEntityCount( + context.getOperationContext(), + input.getTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + viewFilter(context.getOperationContext(), _viewService, input.getViewUrn())); + + // bind to a result. 
+ List resultList = + gmsResult.entrySet().stream() + .map( + entry -> { + EntityCountResult result = new EntityCountResult(); + result.setCount(Math.toIntExact(entry.getValue())); + result.setEntityType(EntityTypeMapper.getType(entry.getKey())); + return result; + }) + .collect(Collectors.toList()); + results.setCounts(resultList); + return results; + } catch (Exception e) { + throw new RuntimeException("Failed to get entity counts", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index 67cc84a33a9541..0632af68998dc9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.EntityType; @@ -10,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListGroupsResult; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -18,16 +21,13 @@ 
import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListGroupsResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -41,51 +41,72 @@ public ListGroupsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListGroupsInput input = bindArgument(environment.getArgument("input"), ListGroupsInput.class); + final ListGroupsInput input = + bindArgument(environment.getArgument("input"), ListGroupsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all group Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_GROUP_ENTITY_NAME, + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, get all group Urns. 
+ final SearchResult gmsResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), + CORP_GROUP_ENTITY_NAME, query, null, - new SortCriterion().setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, count, context.getAuthentication(), - new SearchFlags().setFulltext(true)); + Collections.singletonList( + new SortCriterion() + .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + start, + count); - // Then, get hydrate all groups. - final Map entities = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), null, context.getAuthentication()); + // Then, get hydrate all groups. + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + CORP_GROUP_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null); - // Now that we have entities we can bind this to a result. - final ListGroupsResult result = new ListGroupsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setGroups(mapUnresolvedGroups(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list groups", e); - } - }); + // Now that we have entities we can bind this to a result. 
+ final ListGroupsResult result = new ListGroupsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setGroups( + mapUnresolvedGroups( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list groups", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - // This method maps urns returned from the list endpoint into Partial Group objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Group objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedGroups(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java index 287b4aa7b5dbd7..55a26af603fbe0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -7,6 +10,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -17,10 +21,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class RemoveGroupMembersResolver implements DataFetcher> { private final GroupService _groupService; @@ -30,9 +30,11 @@ public RemoveGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception 
{ + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { - final RemoveGroupMembersInput input = bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); + final RemoveGroupMembersInput input = + bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -43,37 +45,46 @@ public CompletableFuture get(final DataFetchingEnvironment environment) } final Urn groupUrn = Urn.createFromString(groupUrnStr); - final List userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (!_groupService.groupExists(groupUrn)) { + if (!_groupService.groupExists(context.getOperationContext(), groupUrn)) { // The group doesn't exist. throw new DataHubGraphQLException( - String.format("Failed to add remove members from group %s. Group does not exist.", groupUrnStr), + String.format( + "Failed to add remove members from group %s. 
Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership when removing group members from group %s", - groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually removed from it", - groupUrnStr)); - } - try { - _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + Origin groupOrigin = + _groupService.getGroupOrigin(context.getOperationContext(), groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + context.getOperationContext(), groupUrn, context.getActorUrn()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership when removing group members from group %s", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually removed from it", + groupUrnStr)); + } + try { + _groupService.removeExistingNativeGroupMembers( + context.getOperationContext(), groupUrn, userUrnList); + return true; + } catch (Exception e) { + throw new RuntimeException(e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java index 99481868e30cee..04196b01c108e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java @@ -3,6 +3,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; @@ -10,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class RemoveGroupResolver implements DataFetcher> { @@ -24,30 +22,41 @@ public RemoveGroupResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String groupUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(groupUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> 
{ try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(context.getOperationContext(), urn); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(context.getOperationContext(), urn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for group with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for group with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against group with urn %s", groupUrn), e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against group with urn %s", groupUrn), e); - } - }); + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java new file mode 100644 index 00000000000000..380b7c920ab2ff --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolver.java @@ -0,0 +1,326 @@ +package com.linkedin.datahub.graphql.resolvers.health; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.EntityRelationships; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.Health; +import com.linkedin.datahub.graphql.generated.HealthStatus; +import com.linkedin.datahub.graphql.generated.HealthStatusType; +import com.linkedin.datahub.graphql.generated.IncidentState; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.timeseries.AggregationSpec; +import 
com.linkedin.timeseries.AggregationType; +import com.linkedin.timeseries.GenericTable; +import com.linkedin.timeseries.GroupingBucket; +import com.linkedin.timeseries.GroupingBucketType; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.extern.slf4j.Slf4j; + +/** + * Resolver for generating the health badge for an asset, which depends on + * + *

1. Assertions status - whether the asset has active assertions 2. Incidents status - whether + * the asset has active incidents + */ +@Slf4j +public class EntityHealthResolver implements DataFetcher>> { + private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; + private static final String ASSERTION_RUN_EVENT_SUCCESS_TYPE = "SUCCESS"; + private static final String INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME = "entities.keyword"; + private static final String INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME = "state"; + + private final EntityClient _entityClient; + private final GraphClient _graphClient; + private final TimeseriesAspectService _timeseriesAspectService; + + private final Config _config; + + public EntityHealthResolver( + @Nonnull final EntityClient entityClient, + @Nonnull final GraphClient graphClient, + @Nonnull final TimeseriesAspectService timeseriesAspectService) { + this(entityClient, graphClient, timeseriesAspectService, new Config(true, true)); + } + + public EntityHealthResolver( + @Nonnull final EntityClient entityClient, + @Nonnull final GraphClient graphClient, + @Nonnull final TimeseriesAspectService timeseriesAspectService, + @Nonnull final Config config) { + _entityClient = entityClient; + _graphClient = graphClient; + _timeseriesAspectService = timeseriesAspectService; + _config = config; + } + + @Override + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { + final Entity parent = environment.getSource(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final HealthStatuses statuses = + computeHealthStatusForAsset(parent.getUrn(), environment.getContext()); + return statuses.healths; + } catch (Exception e) { + throw new RuntimeException("Failed to resolve asset's health status.", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + /** + * Computes the "resolved health status" for an asset by + * + *

- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing + */ + private HealthStatuses computeHealthStatusForAsset( + final String entityUrn, final QueryContext context) { + final List healthStatuses = new ArrayList<>(); + + if (_config.getIncidentsEnabled()) { + final Health incidentsHealth = computeIncidentsHealthForAsset(entityUrn, context); + if (incidentsHealth != null) { + healthStatuses.add(incidentsHealth); + } + } + + if (_config.getAssertionsEnabled()) { + final Health assertionsHealth = computeAssertionHealthForAsset(entityUrn, context); + if (assertionsHealth != null) { + healthStatuses.add(assertionsHealth); + } + } + + return new HealthStatuses(healthStatuses); + } + + /** + * Returns the resolved "incidents health", which is currently a static function of whether there + * are any active incidents open on an asset + * + * @param entityUrn the asset to compute health for + * @param context the query context + * @return an instance of {@link Health} for the entity, null if one cannot be computed. + */ + private Health computeIncidentsHealthForAsset( + final String entityUrn, final QueryContext context) { + try { + final Filter filter = buildIncidentsEntityFilter(entityUrn, IncidentState.ACTIVE.toString()); + final SearchResult searchResult = + _entityClient.filter( + context.getOperationContext(), Constants.INCIDENT_ENTITY_NAME, filter, null, 0, 1); + final Integer activeIncidentCount = searchResult.getNumEntities(); + if (activeIncidentCount > 0) { + // There are active incidents. + return new Health( + HealthStatusType.INCIDENTS, + HealthStatus.FAIL, + String.format( + "%s active incident%s", activeIncidentCount, activeIncidentCount > 1 ? "s" : ""), + ImmutableList.of("ACTIVE_INCIDENTS")); + } + // Report pass if there are no active incidents. 
+ return new Health(HealthStatusType.INCIDENTS, HealthStatus.PASS, null, null); + } catch (RemoteInvocationException e) { + log.error("Failed to compute incident health status!", e); + return null; + } + } + + private Filter buildIncidentsEntityFilter(final String entityUrn, final String state) { + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, entityUrn); + criterionMap.put(INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME, state); + return QueryUtils.newFilter(criterionMap); + } + + /** + * TODO: Replace this with the assertions summary aspect. + * + *

Returns the resolved "assertions health", which is currently a static function of whether + * the most recent run of all asset assertions has succeeded. + * + * @param entityUrn the entity to compute health for + * @param context the query context + * @return an instance of {@link Health} for the asset, null if one cannot be computed. + */ + @Nullable + private Health computeAssertionHealthForAsset( + final String entityUrn, final QueryContext context) { + // Get active assertion urns + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); + + if (relationships.getTotal() > 0) { + + // If there are assertions defined, then we should return a non-null health for this asset. + final Set activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); + + final GenericTable assertionRunResults = + getAssertionRunsTable(context.getOperationContext(), entityUrn); + + if (!assertionRunResults.hasRows() || assertionRunResults.getRows().size() == 0) { + // No assertion run results found. Return empty health! + return null; + } + + final List failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + + // Finally compute & return the health. 
+ final Health health = new Health(); + health.setType(HealthStatusType.ASSERTIONS); + if (failingAssertionUrns.size() > 0) { + health.setStatus(HealthStatus.FAIL); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); + health.setCauses(failingAssertionUrns); + } else { + health.setStatus(HealthStatus.PASS); + health.setMessage("All assertions are passing"); + } + return health; + } + return null; + } + + private GenericTable getAssertionRunsTable( + @Nonnull OperationContext opContext, final String asserteeUrn) { + return _timeseriesAspectService.getAggregatedStats( + opContext, + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + createAssertionAggregationSpecs(), + createAssertionsFilter(asserteeUrn), + createAssertionGroupingBuckets()); + } + + private List getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { + // Create the buckets based on the result + return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); + } + + private Filter createAssertionsFilter(final String datasetUrn) { + final Filter filter = new Filter(); + final ArrayList criteria = new ArrayList<>(); + + // Add filter for asserteeUrn == datasetUrn + Criterion datasetUrnCriterion = + new Criterion().setField("asserteeUrn").setCondition(Condition.EQUAL).setValue(datasetUrn); + criteria.add(datasetUrnCriterion); + + // Add filter for result == result + Criterion startTimeCriterion = + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + criteria.add(startTimeCriterion); + + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + return filter; + } + + private AggregationSpec[] createAssertionAggregationSpecs() { + // Simply fetch the timestamp, 
result type for the assertion URN. + AggregationSpec resultTypeAggregation = + new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); + AggregationSpec timestampAggregation = + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; + } + + private GroupingBucket[] createAssertionGroupingBuckets() { + // String grouping bucket on "assertionUrn" + GroupingBucket assertionUrnBucket = new GroupingBucket(); + assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); + return new GroupingBucket[] {assertionUrnBucket}; + } + + private List resultToFailedAssertionUrns( + final StringArrayArray rows, final Set activeAssertionUrns) { + final List failedAssertionUrns = new ArrayList<>(); + for (StringArray row : rows) { + // Result structure should be assertionUrn, event.result.type, timestampMillis + if (row.size() != 3) { + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", + row.size())); + } + + final String assertionUrn = row.get(0); + final String resultType = row.get(1); + + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. 
+ if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + failedAssertionUrns.add(assertionUrn); + } + } + return failedAssertionUrns; + } + + @Data + @AllArgsConstructor + public static class Config { + private Boolean assertionsEnabled; + private Boolean incidentsEnabled; + } + + @AllArgsConstructor + private static class HealthStatuses { + private final List healths; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java new file mode 100644 index 00000000000000..d79634c27d881c --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java @@ -0,0 +1,128 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityIncidentsResult; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.datahub.graphql.types.incident.IncidentMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import 
java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +/** GraphQL Resolver used for fetching the list of Incidents associated with an Entity. */ +public class EntityIncidentsResolver + implements DataFetcher> { + + static final String INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME = "entities.keyword"; + static final String INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME = "state"; + static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; + + private final EntityClient _entityClient; + + public EntityIncidentsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final Optional maybeState = Optional.ofNullable(environment.getArgument("state")); + + try { + // Step 1: Fetch set of incidents associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildIncidentsEntityFilter(entityUrn, maybeState); + final List sortCriteria = buildIncidentsSortCriteria(); + final SearchResult searchResult = + _entityClient.filter( + context.getOperationContext(), + Constants.INCIDENT_ENTITY_NAME, + filter, + sortCriteria, + start, + count); + + final List incidentUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.INCIDENT_ENTITY_NAME, + new HashSet<>(incidentUrns), + null); + + // Step 3: Map GMS incident model to GraphQL model + final List entityResult = new ArrayList<>(); + for (Urn urn : incidentUrns) { + entityResult.add(entities.getOrDefault(urn, null)); + } + final List incidents = + entityResult.stream() + .filter(Objects::nonNull) + .map(i -> IncidentMapper.map(context, i)) + .collect(Collectors.toList()); + + // Step 4: Package and return result + final EntityIncidentsResult result = new EntityIncidentsResult(); + result.setCount(searchResult.getPageSize()); + result.setStart(searchResult.getFrom()); + result.setTotal(searchResult.getNumEntities()); + result.setIncidents(incidents); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private Filter buildIncidentsEntityFilter( + final String entityUrn, final Optional maybeState) { + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, entityUrn); + maybeState.ifPresent( + incidentState -> criterionMap.put(INCIDENT_STATE_SEARCH_INDEX_FIELD_NAME, incidentState)); + return QueryUtils.newFilter(criterionMap); + } + + private List buildIncidentsSortCriteria() { + final SortCriterion sortCriterion = new SortCriterion(); + 
sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); + sortCriterion.setOrder(SortOrder.DESCENDING); + return Collections.singletonList(sortCriterion); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java new file mode 100644 index 00000000000000..454ba693da95a7 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java @@ -0,0 +1,132 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.RaiseIncidentInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import 
com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.mxe.MetadataChangeProposal; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** Resolver used for creating (raising) a new asset incident. */ +@Slf4j +@RequiredArgsConstructor +public class RaiseIncidentResolver implements DataFetcher> { + + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + + final QueryContext context = environment.getContext(); + final RaiseIncidentInput input = + bindArgument(environment.getArgument("input"), RaiseIncidentInput.class); + final Urn resourceUrn = Urn.createFromString(input.getResourceUrn()); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!isAuthorizedToCreateIncidentForResource(resourceUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final IncidentKey key = new IncidentKey(); + + // Generate a random UUID for the incident + final String id = UUID.randomUUID().toString(); + key.setId(id); + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + INCIDENT_ENTITY_NAME, + INCIDENT_INFO_ASPECT_NAME, + mapIncidentInfo(input, context)); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + log.error("Failed to create incident. 
{}", e.getMessage()); + throw new RuntimeException("Failed to create incident", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private IncidentInfo mapIncidentInfo(final RaiseIncidentInput input, final QueryContext context) + throws URISyntaxException { + final IncidentInfo result = new IncidentInfo(); + result.setType( + IncidentType.valueOf( + input + .getType() + .name())); // Assumption Alert: This assumes that GMS incident type === GraphQL + // incident type. + result.setCustomType(input.getCustomType(), SetMode.IGNORE_NULL); + result.setTitle(input.getTitle(), SetMode.IGNORE_NULL); + result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + result.setEntities( + new UrnArray(ImmutableList.of(Urn.createFromString(input.getResourceUrn())))); + result.setCreated( + new AuditStamp() + .setActor(Urn.createFromString(context.getActorUrn())) + .setTime(System.currentTimeMillis())); + // Create the incident in the 'active' state by default. + result.setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setLastUpdated( + new AuditStamp() + .setActor(Urn.createFromString(context.getActorUrn())) + .setTime(System.currentTimeMillis()))); + result.setSource(new IncidentSource().setType(IncidentSourceType.MANUAL), SetMode.IGNORE_NULL); + result.setPriority(input.getPriority(), SetMode.IGNORE_NULL); + return result; + } + + private boolean isAuthorizedToCreateIncidentForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java new file mode 100644 index 00000000000000..d51ceab31e60ec --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java @@ -0,0 +1,112 @@ +package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; +import com.linkedin.datahub.graphql.generated.UpdateIncidentStatusInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import 
graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; + +/** GraphQL Resolver that updates an incident's status */ +@RequiredArgsConstructor +public class UpdateIncidentStatusResolver implements DataFetcher> { + + private final EntityClient _entityClient; + private final EntityService _entityService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final Urn incidentUrn = Urn.createFromString(environment.getArgument("urn")); + final UpdateIncidentStatusInput input = + bindArgument(environment.getArgument("input"), UpdateIncidentStatusInput.class); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + + // Check whether the incident exists. + IncidentInfo info = + (IncidentInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + incidentUrn.toString(), + INCIDENT_INFO_ASPECT_NAME, + _entityService, + null); + + if (info != null) { + // Check whether the actor has permission to edit the incident + // Currently only supporting a single entity. TODO: Support multiple incident entities. + final Urn resourceUrn = info.getEntities().get(0); + if (isAuthorizedToUpdateIncident(resourceUrn, context)) { + info.setStatus( + new IncidentStatus() + .setState(IncidentState.valueOf(input.getState().name())) + .setLastUpdated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis()))); + if (input.getMessage() != null) { + info.getStatus().setMessage(input.getMessage()); + } + try { + // Finally, create the MetadataChangeProposal. 
+ final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + incidentUrn, INCIDENT_INFO_ASPECT_NAME, info); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to update incident status!", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + throw new DataHubGraphQLException( + "Failed to update incident. Incident does not exist.", + DataHubGraphQLErrorCode.NOT_FOUND); + }, + this.getClass().getSimpleName(), + "get"); + } + + private boolean isAuthorizedToUpdateIncident(final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 6a4af7563a8d8e..24d0e946145054 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,25 +1,29 @@ package com.linkedin.datahub.graphql.resolvers.ingest; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import static com.datahub.authorization.AuthUtil.isAuthorizedEntityType; +import static com.linkedin.metadata.Constants.INGESTION_SOURCE_ENTITY_NAME; +import 
static com.linkedin.metadata.Constants.SECRETS_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; + +import com.datahub.authorization.AuthUtil; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.metadata.authorization.PoliciesConfig; +import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), authorizer); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(INGESTION_SOURCE_ENTITY_NAME)); } public static boolean canManageSecrets(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(SECRETS_ENTITY_NAME)); } - private IngestionAuthUtils() { } + private IngestionAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index 1140c031f1d355..3c3fed846e56a9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; import com.linkedin.common.urn.Urn; 
+import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ExecutionRequest; import com.linkedin.datahub.graphql.generated.IngestionConfig; import com.linkedin.datahub.graphql.generated.IngestionSchedule; @@ -23,21 +24,23 @@ import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IngestionResolverUtils { - public static List mapExecutionRequests(final Collection requests) { + public static List mapExecutionRequests( + @Nullable QueryContext context, final Collection requests) { List result = new ArrayList<>(); for (final EntityResponse request : requests) { - result.add(mapExecutionRequest(request)); + result.add(mapExecutionRequest(context, request)); } return result; } - public static ExecutionRequest mapExecutionRequest(final EntityResponse entityResponse) { + public static ExecutionRequest mapExecutionRequest( + @Nullable QueryContext context, final EntityResponse entityResponse) { final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -46,40 +49,52 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe result.setId(entityUrn.getId()); // Map input aspect. Must be present. 
- final EnvelopedAspect envelopedInput = aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); + final EnvelopedAspect envelopedInput = + aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); if (envelopedInput != null) { - final ExecutionRequestInput executionRequestInput = new ExecutionRequestInput(envelopedInput.getValue().data()); - final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); + final ExecutionRequestInput executionRequestInput = + new ExecutionRequestInput(envelopedInput.getValue().data()); + final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = + new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); inputResult.setTask(executionRequestInput.getTask()); if (executionRequestInput.hasSource()) { inputResult.setSource(mapExecutionRequestSource(executionRequestInput.getSource())); } if (executionRequestInput.hasArgs()) { - inputResult.setArguments(StringMapMapper.map(executionRequestInput.getArgs())); + inputResult.setArguments(StringMapMapper.map(context, executionRequestInput.getArgs())); } inputResult.setRequestedAt(executionRequestInput.getRequestedAt()); + if (executionRequestInput.getActorUrn() != null) { + inputResult.setActorUrn(executionRequestInput.getActorUrn().toString()); + } result.setInput(inputResult); } // Map result aspect. Optional. 
- final EnvelopedAspect envelopedResult = aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + final EnvelopedAspect envelopedResult = + aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (envelopedResult != null) { - final ExecutionRequestResult executionRequestResult = new ExecutionRequestResult(envelopedResult.getValue().data()); + final ExecutionRequestResult executionRequestResult = + new ExecutionRequestResult(envelopedResult.getValue().data()); result.setResult(mapExecutionRequestResult(executionRequestResult)); } return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource + mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); result.setType(execRequestSource.getType()); return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult + mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); result.setStatus(execRequestResult.getStatus()); result.setStartTimeMs(execRequestResult.getStartTimeMs()); 
result.setDurationMs(execRequestResult.getDurationMs()); @@ -90,7 +105,8 @@ public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapE return result; } - public static StructuredReport mapStructuredReport(final StructuredExecutionReport structuredReport) { + public static StructuredReport mapStructuredReport( + final StructuredExecutionReport structuredReport) { StructuredReport structuredReportResult = new StructuredReport(); structuredReportResult.setType(structuredReport.getType()); structuredReportResult.setSerializedValue(structuredReport.getSerializedValue()); @@ -98,7 +114,8 @@ public static StructuredReport mapStructuredReport(final StructuredExecutionRepo return structuredReportResult; } - public static List mapIngestionSources(final Collection entities) { + public static List mapIngestionSources( + final Collection entities) { final List results = new ArrayList<>(); for (EntityResponse response : entities) { try { @@ -118,16 +135,19 @@ public static IngestionSource mapIngestionSource(final EntityResponse ingestionS final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); if (envelopedInfo == null) { - throw new IllegalStateException("No ingestion source info aspect exists for urn: " + entityUrn); + throw new IllegalStateException( + "No ingestion source info aspect exists for urn: " + entityUrn); } // Bind into a strongly typed object. 
- final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); return mapIngestionSourceInfo(entityUrn, ingestionSourceInfo); } - public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHubIngestionSourceInfo info) { + public static IngestionSource mapIngestionSourceInfo( + final Urn urn, final DataHubIngestionSourceInfo info) { final IngestionSource result = new IngestionSource(); result.setUrn(urn.toString()); result.setName(info.getName()); @@ -139,29 +159,30 @@ public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHu return result; } - public static IngestionConfig mapIngestionSourceConfig(final DataHubIngestionSourceConfig config) { + public static IngestionConfig mapIngestionSourceConfig( + final DataHubIngestionSourceConfig config) { final IngestionConfig result = new IngestionConfig(); result.setRecipe(config.getRecipe()); result.setVersion(config.getVersion()); result.setExecutorId(config.getExecutorId()); result.setDebugMode(config.isDebugMode()); if (config.getExtraArgs() != null) { - List extraArgs = config.getExtraArgs() - .keySet() - .stream() - .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) - .collect(Collectors.toList()); + List extraArgs = + config.getExtraArgs().keySet().stream() + .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) + .collect(Collectors.toList()); result.setExtraArgs(extraArgs); } return result; } - public static IngestionSchedule mapIngestionSourceSchedule(final DataHubIngestionSourceSchedule schedule) { + public static IngestionSchedule mapIngestionSourceSchedule( + final DataHubIngestionSourceSchedule schedule) { final IngestionSchedule result = new IngestionSchedule(); result.setInterval(schedule.getInterval()); result.setTimezone(schedule.getTimezone()); return 
result; } - private IngestionResolverUtils() { } + private IngestionResolverUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java index 7f9cb6176989f8..a373e4254d0597 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java @@ -1,11 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -22,15 +27,9 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Cancels a requested ingestion execution by emitting a KILL signal. 
- */ -public class CancelIngestionExecutionRequestResolver implements DataFetcher> { +/** Cancels a requested ingestion execution by emitting a KILL signal. */ +public class CancelIngestionExecutionRequestResolver + implements DataFetcher> { private static final String KILL_EXECUTION_REQUEST_SIGNAL = "KILL"; @@ -44,45 +43,60 @@ public CancelIngestionExecutionRequestResolver(final EntityClient entityClient) public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { - if (IngestionAuthUtils.canManageIngestion(context)) { + final CancelIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); - final CancelIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); + try { + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map response = + _entityClient.batchGetV2( + context.getOperationContext(), + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME)); - try { - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", ingestionSourceUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find 
ingestion source with urn %s", ingestionSourceUrn), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = - response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - // Build the arguments map. - final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); - execSignal.setSignal(KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. - execSignal.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execSignal.setCreatedAt(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(Urn.createFromString(context.getActorUrn())) - ); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn( - input.getExecutionRequestUrn()), EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, execSignal); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to submit cancel signal %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Build the arguments map. + final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); + execSignal.setSignal( + KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. 
+ execSignal.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execSignal.setCreatedAt( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(input.getExecutionRequestUrn()), + EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, + execSignal); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to submit cancel signal %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java index ea20b837e0a1f6..160624a4b0fe57 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java @@ -1,10 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.data.template.StringMap; import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -30,15 +36,9 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ -public class CreateIngestionExecutionRequestResolver implements DataFetcher> { +/** Creates an on-demand ingestion execution request. */ +public class CreateIngestionExecutionRequestResolver + implements DataFetcher> { private static final String RUN_INGEST_TASK_NAME = "RUN_INGEST"; private static final String MANUAL_EXECUTION_SOURCE_NAME = "MANUAL_INGESTION_SOURCE"; @@ -49,7 +49,8 @@ public class CreateIngestionExecutionRequestResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final CreateIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - // Fetch the original ingestion source - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map response = - 
_entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); - - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - - if (!ingestionSourceInfo.getConfig().hasRecipe()) { - throw new DataHubGraphQLException( - String.format("Failed to find valid ingestion source with urn %s. Missing recipe", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - // Build the arguments map. - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task - execInput.setSource( - new ExecutionRequestSource().setType(MANUAL_EXECUTION_SOURCE_NAME).setIngestionSource(ingestionSourceUrn)); - execInput.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map arguments = new HashMap<>(); - String recipe = ingestionSourceInfo.getConfig().getRecipe(); - recipe = injectRunId(recipe, executionRequestUrn.toString()); - recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); - arguments.put(RECIPE_ARG_NAME, recipe); - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().hasVersion() - ? 
ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion() - ); - if (ingestionSourceInfo.getConfig().hasVersion()) { - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); - } - String debugMode = "false"; - if (ingestionSourceInfo.getConfig().hasDebugMode()) { - debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? "true" : "false"; - } - if (ingestionSourceInfo.getConfig().hasExtraArgs()) { - arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final CreateIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + // Fetch the original ingestion source + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map response = + _entityClient.batchGetV2( + context.getOperationContext(), + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME)); + + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + + if (!ingestionSourceInfo.getConfig().hasRecipe()) { + throw new DataHubGraphQLException( + 
String.format( + "Failed to find valid ingestion source with urn %s. Missing recipe", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + // Build the arguments map. + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task + execInput.setSource( + new ExecutionRequestSource() + .setType(MANUAL_EXECUTION_SOURCE_NAME) + .setIngestionSource(ingestionSourceUrn)); + execInput.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execInput.setRequestedAt(System.currentTimeMillis()); + execInput.setActorUrn(UrnUtils.getUrn(context.getActorUrn())); + + Map arguments = new HashMap<>(); + String recipe = ingestionSourceInfo.getConfig().getRecipe(); + recipe = injectRunId(recipe, executionRequestUrn.toString()); + recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); + arguments.put(RECIPE_ARG_NAME, recipe); + arguments.put( + VERSION_ARG_NAME, + ingestionSourceInfo.getConfig().hasVersion() + ? ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); + if (ingestionSourceInfo.getConfig().hasVersion()) { + arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); + } + String debugMode = "false"; + if (ingestionSourceInfo.getConfig().hasDebugMode()) { + debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? 
"true" : "false"; + } + if (ingestionSourceInfo.getConfig().hasExtraArgs()) { + arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + } + arguments.put(DEBUG_MODE_ARG_NAME, debugMode); + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new ingestion execution request %s", input), e); + } } - arguments.put(DEBUG_MODE_ARG_NAME, debugMode); - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, - EXECUTION_REQUEST_ENTITY_NAME, EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new ingestion execution request %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } /** - * Injects an override run id into a recipe for tracking purposes. Any existing run id will be overwritten. + * Injects an override run id into a recipe for tracking purposes. Any existing run id will be + * overwritten. * - * TODO: Determine if this should be handled in the executor itself. + *

TODO: Determine if this should be handled in the executor itself. * * @param runId the run id to place into the recipe * @return a modified recipe JSON string @@ -149,7 +175,8 @@ private String injectRunId(final String originalJson, final String runId) { return obj.toString(); } catch (JSONException e) { // This should ideally never be hit. - throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided."); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided."); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java index 1886db62ae4502..aeb4dd4f948019 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java @@ -1,35 +1,33 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; import 
com.linkedin.entity.client.EntityClient; import com.linkedin.execution.ExecutionRequestInput; import com.linkedin.execution.ExecutionRequestSource; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.IngestionUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ +/** Creates an on-demand ingestion execution request. */ public class CreateTestConnectionRequestResolver implements DataFetcher> { private static final String TEST_CONNECTION_TASK_NAME = "TEST_CONNECTION"; @@ -41,7 +39,8 @@ public class CreateTestConnectionRequestResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - final CreateTestConnectionRequestInput input = - bindArgument(environment.getArgument("input"), CreateTestConnectionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(TEST_CONNECTION_TASK_NAME); - execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); - execInput.setExecutorId(DEFAULT_EXECUTOR_ID); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map arguments = new HashMap<>(); - arguments.put(RECIPE_ARG_NAME, IngestionUtils.injectPipelineName(input.getRecipe(), executionRequestUrn.toString())); - if (input.getVersion() != null) { - arguments.put(VERSION_ARG_NAME, input.getVersion()); - } - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, EXECUTION_REQUEST_ENTITY_NAME, - EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new test ingestion connection request %s", input.toString()), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + final CreateTestConnectionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateTestConnectionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(TEST_CONNECTION_TASK_NAME); + execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); + execInput.setExecutorId(DEFAULT_EXECUTOR_ID); + execInput.setRequestedAt(System.currentTimeMillis()); + execInput.setActorUrn(UrnUtils.getUrn(context.getActorUrn())); + + Map arguments = new HashMap<>(); + arguments.put( + RECIPE_ARG_NAME, + IngestionUtils.injectPipelineName( + input.getRecipe(), executionRequestUrn.toString())); + if (input.getVersion() != null) { + arguments.put(VERSION_ARG_NAME, input.getVersion()); + } + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to create new test ingestion connection request %s", input.toString()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 8880330d634953..474ab342256ac8 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -19,12 +20,10 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Retrieves an Ingestion Execution Request by primary key (urn). - */ +/** Retrieves an Ingestion Execution Request by primary key (urn). */ @Slf4j -public class GetIngestionExecutionRequestResolver implements DataFetcher> { +public class GetIngestionExecutionRequestResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -33,32 +32,42 @@ public GetIngestionExecutionRequestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch specific execution request - final Urn urn = Urn.createFromString(urnStr); - final Map entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, 
Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No execution request found - throw new DataHubGraphQLException(String.format("Failed to find Execution Request with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve execution request", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // Fetch specific execution request + final Urn urn = Urn.createFromString(urnStr); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)); + if (!entities.containsKey(urn)) { + // No execution request found + throw new DataHubGraphQLException( + String.format("Failed to find Execution Request with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Execution request found + return IngestionResolverUtils.mapExecutionRequest(context, entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve execution request", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index c72f273a9027e2..a4c2ab42227d9c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -4,6 +4,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.IngestionSource; import com.linkedin.datahub.graphql.generated.IngestionSourceExecutionRequests; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionResolverUtils; @@ -22,6 +23,7 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -29,11 +31,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. 
*/ @Slf4j -public class IngestionSourceExecutionRequestsResolver implements DataFetcher> { +public class IngestionSourceExecutionRequestsResolver + implements DataFetcher> { private static final String INGESTION_SOURCE_FIELD_NAME = "ingestionSource"; private static final String REQUEST_TIME_MS_FIELD_NAME = "requestTimeMs"; @@ -45,64 +46,81 @@ public IngestionSourceExecutionRequestsResolver(final EntityClient entityClient) } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final String urn = ((IngestionSource) environment.getSource()).getUrn(); - final Integer start = environment.getArgument("start") != null ? environment.getArgument("start") : 0; - final Integer count = environment.getArgument("count") != null ? environment.getArgument("count") : 10; + final Integer start = + environment.getArgument("start") != null ? environment.getArgument("start") : 0; + final Integer count = + environment.getArgument("count") != null ? environment.getArgument("count") : 10; - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { - try { + // 1. Fetch the related edges + final Criterion filterCriterion = + new Criterion() + .setField(INGESTION_SOURCE_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(urn); - // 1. 
Fetch the related edges - final Criterion filterCriterion = new Criterion() - .setField(INGESTION_SOURCE_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(urn); + final SearchResult executionsSearchResult = + _entityClient.filter( + context.getOperationContext(), + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + Collections.singletonList( + new SortCriterion() + .setField(REQUEST_TIME_MS_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + start, + count); - final SearchResult executionsSearchResult = _entityClient.filter( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - new SortCriterion().setField(REQUEST_TIME_MS_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication() - ); + // 2. Batch fetch the related ExecutionRequests + final Set relatedExecRequests = + executionsSearchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); - // 2. Batch fetch the related ExecutionRequests - final Set relatedExecRequests = executionsSearchResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.EXECUTION_REQUEST_ENTITY_NAME, + relatedExecRequests, + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)); - final Map entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - relatedExecRequests, - ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - - // 3. 
Map the GMS ExecutionRequests into GraphQL Execution Requests - final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); - result.setStart(executionsSearchResult.getFrom()); - result.setCount(executionsSearchResult.getPageSize()); - result.setTotal(executionsSearchResult.getNumEntities()); - result.setExecutionRequests(IngestionResolverUtils.mapExecutionRequests( - executionsSearchResult.getEntities() - .stream() - .map(searchResult -> entities.get(searchResult.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - )); - return result; - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve executions associated with ingestion source with urn %s", urn), e); - } - }); + // 3. Map the GMS ExecutionRequests into GraphQL Execution Requests + final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); + result.setStart(executionsSearchResult.getFrom()); + result.setCount(executionsSearchResult.getPageSize()); + result.setTotal(executionsSearchResult.getNumEntities()); + result.setExecutionRequests( + IngestionResolverUtils.mapExecutionRequests( + context, + executionsSearchResult.getEntities().stream() + .map(searchResult -> entities.get(searchResult.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve executions associated with ingestion source with urn %s", + urn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 05fcacf7c09460..62977b6b92676e 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -1,17 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class RollbackIngestionResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -20,33 +20,41 @@ public RollbackIngestionResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - final RollbackIngestionInput input = bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); - final String runId = input.getRunId(); - - rollbackIngestion(runId, context); - return true; - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + final RollbackIngestionInput input = + bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); + final String runId = input.getRunId(); + + rollbackIngestion(runId, context); + return true; + }, + this.getClass().getSimpleName(), + "get"); } - public CompletableFuture rollbackIngestion(final String runId, final QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to rollback ingestion execution", e); - } - }); - + public CompletableFuture rollbackIngestion( + final String runId, final QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _entityClient.rollbackIngestion( + context.getOperationContext(), runId, context.getAuthorizer()); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to rollback ingestion execution", e); + } + }, + this.getClass().getSimpleName(), + "rollbackIngestion"); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index e1745031d9daed..8bd8325bf16c01 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -1,39 +1,37 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateSecretInput; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; +import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.key.DataHubSecretKey; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the MANAGE_SECRETS privilege. + * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. 
*/ public class CreateSecretResolver implements DataFetcher> { private final EntityClient _entityClient; private final SecretService _secretService; - public CreateSecretResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + public CreateSecretResolver(final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @@ -41,36 +39,49 @@ public CreateSecretResolver( @Override public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateSecretInput input = bindArgument(environment.getArgument("input"), CreateSecretInput.class); + final CreateSecretInput input = + bindArgument(environment.getArgument("input"), CreateSecretInput.class); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { - if (IngestionAuthUtils.canManageSecrets(context)) { + try { + // Create the Ingestion source key --> use the display name as a unique id to ensure + // it's not duplicated. + final DataHubSecretKey key = new DataHubSecretKey(); + key.setId(input.getName()); - try { - // Create the Ingestion source key --> use the display name as a unique id to ensure it's not duplicated. - final DataHubSecretKey key = new DataHubSecretKey(); - key.setId(input.getName()); + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME))) { + throw new IllegalArgumentException("This Secret already exists!"); + } - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Secret already exists!"); - } - - // Create the secret value. 
- final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + // Create the secret value. + final DataHubSecretValue value = + DataHubSecretValueMapper.map( + null, + input.getName(), + _secretService.encrypt(input.getValue()), + input.getDescription(), + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, SECRETS_ENTITY_NAME, - SECRET_VALUE_ASPECT_NAME, value); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new secret with name %s", input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, SECRETS_ENTITY_NAME, SECRET_VALUE_ASPECT_NAME, value); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new secret with name %s", input.getName()), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java index b35931420c0787..da81d560c6dbd6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java @@ -2,6 +2,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; import com.linkedin.entity.client.EntityClient; @@ -9,10 +10,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. - */ +/** Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. 
*/ public class DeleteSecretResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -25,17 +23,23 @@ public DeleteSecretResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final String secretUrn = environment.getArgument("urn"); - final Urn urn = Urn.createFromString(secretUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return secretUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against secret with urn %s", secretUrn), e); - } - }); + final String inputUrn = environment.getArgument("urn"); + final Urn urn = Urn.createFromString(inputUrn); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(context.getOperationContext(), urn); + return inputUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against secret with urn %s", inputUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 85c6c6754470db..100965d457a1de 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.GetSecretValuesInput; import com.linkedin.datahub.graphql.generated.SecretValue; @@ -11,10 +14,10 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -23,11 +26,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / decryption. - * Requires the MANAGE_SECRETS privilege. 
+ * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / + * decryption. Requires the MANAGE_SECRETS privilege. */ public class GetSecretValuesResolver implements DataFetcher>> { @@ -35,60 +36,69 @@ public class GetSecretValuesResolver implements DataFetcher> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final GetSecretValuesInput input = bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); + final GetSecretValuesInput input = + bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch secrets - final Set urns = input.getSecrets() - .stream() - .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) - .collect(Collectors.toSet()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // Fetch secrets + final Set urns = + input.getSecrets().stream() + .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) + .collect(Collectors.toSet()); - final Map entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(urns), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.SECRETS_ENTITY_NAME, + new HashSet<>(urns), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)); - // Now for each secret, decrypt and return the value. If no secret was found, then we will simply omit it from the list. - // There is no ordering guarantee for the list. 
- return entities.values() - .stream() - .map(entity -> { - EnvelopedAspect aspect = entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); - if (aspect != null) { - // Aspect is present. - final DataHubSecretValue secretValue = new DataHubSecretValue(aspect.getValue().data()); - // Now decrypt the encrypted secret. - final String decryptedSecretValue = decryptSecret(secretValue.getValue()); - return new SecretValue(secretValue.getName(), decryptedSecretValue); - } else { - // No secret exists - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + // Now for each secret, decrypt and return the value. If no secret was found, then we + // will simply omit it from the list. + // There is no ordering guarantee for the list. + return entities.values().stream() + .map( + entity -> { + EnvelopedAspect aspect = + entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); + if (aspect != null) { + // Aspect is present. + final DataHubSecretValue secretValue = + new DataHubSecretValue(aspect.getValue().data()); + // Now decrypt the encrypted secret. + final String decryptedSecretValue = decryptSecret(secretValue.getValue()); + return new SecretValue(secretValue.getName(), decryptedSecretValue); + } else { + // No secret exists + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private String decryptSecret(final String encryptedSecret) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index b0d8c9fd34303c..bf8d7c800ccae6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.ListSecretsInput; import com.linkedin.datahub.graphql.generated.ListSecretsResult; @@ -14,7 +18,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -23,6 +26,7 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -31,13 +35,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. - */ +/** Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. */ @Slf4j public class ListSecretsResolver implements DataFetcher> { @@ -52,55 +50,70 @@ public ListSecretsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final ListSecretsInput input = bindArgument(environment.getArgument("input"), ListSecretsInput.class); + final ListSecretsInput input = + bindArgument(environment.getArgument("input"), ListSecretsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all secrets - final SearchResult gmsResult = _entityClient.search( - Constants.SECRETS_ENTITY_NAME, - query, - null, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - // Then, resolve all secrets - final Map entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); - - // Now that we have entities we can bind this to a result. 
- final ListSecretsResult result = new ListSecretsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setSecrets(mapEntities(gmsResult.getEntities().stream() - .map(entity -> entities.get(entity.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()))); - return result; - - } catch (Exception e) { - throw new RuntimeException("Failed to list secrets", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, get all secrets + final SearchResult gmsResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), + Constants.SECRETS_ENTITY_NAME, + query, + null, + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + start, + count); + + // Then, resolve all secrets + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.SECRETS_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)); + + // Now that we have entities we can bind this to a result. + final ListSecretsResult result = new ListSecretsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setSecrets( + mapEntities( + gmsResult.getEntities().stream() + .map(entity -> entities.get(entity.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + + } catch (Exception e) { + throw new RuntimeException("Failed to list secrets", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private List mapEntities(final List entities) { @@ -113,7 +126,8 @@ private List mapEntities(final List entities) { final EnvelopedAspect envelopedInfo = aspects.get(Constants.SECRET_VALUE_ASPECT_NAME); // Bind into a strongly typed object. - final DataHubSecretValue secretValue = new DataHubSecretValue(envelopedInfo.getValue().data()); + final DataHubSecretValue secretValue = + new DataHubSecretValue(envelopedInfo.getValue().data()); // Map using the strongly typed object. results.add(mapSecretValue(entityUrn, secretValue)); @@ -128,4 +142,4 @@ private Secret mapSecretValue(final Urn urn, final DataHubSecretValue value) { result.setDescription(value.getDescription(GetMode.NULL)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java index e510a9fff80aa4..87a3e5cb79ebfc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java @@ -8,10 +8,7 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - -/** - * Utility methods to encrypt and decrypt DataHub secrets. - */ +/** Utility methods to encrypt and decrypt DataHub secrets. 
*/ public class SecretUtils { static String encrypt(String value, String secret) { @@ -28,9 +25,10 @@ static String encrypt(String value, String secret) { } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } - Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding"); + Cipher cipher = Cipher.getInstance("AES"); cipher.init(Cipher.ENCRYPT_MODE, secretKey); - return Base64.getEncoder().encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); + return Base64.getEncoder() + .encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); } catch (Exception e) { throw new RuntimeException("Failed to encrypt value using provided secret!"); } @@ -50,7 +48,7 @@ static String decrypt(String encryptedValue, String secret) { } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } - Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5PADDING"); + Cipher cipher = Cipher.getInstance("AES"); cipher.init(Cipher.DECRYPT_MODE, secretKey); return new String(cipher.doFinal(Base64.getDecoder().decode(encryptedValue))); } catch (Exception e) { @@ -59,6 +57,5 @@ static String decrypt(String encryptedValue, String secret) { return null; } - private SecretUtils() { - } + private SecretUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java new file mode 100644 index 00000000000000..e23dd800478d40 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java @@ -0,0 +1,85 @@ +package com.linkedin.datahub.graphql.resolvers.ingest.secret; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.buildMetadataChangeProposalWithUrn; +import static 
com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateSecretInput; +import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; +import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.secret.DataHubSecretValue; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. 
+ */ +@Slf4j +@RequiredArgsConstructor +public class UpdateSecretResolver implements DataFetcher> { + private final EntityClient entityClient; + private final SecretService secretService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final UpdateSecretInput input = + bindArgument(environment.getArgument("input"), UpdateSecretInput.class); + final Urn secretUrn = Urn.createFromString(input.getUrn()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { + + try { + EntityResponse response = + entityClient.getV2( + context.getOperationContext(), + secretUrn.getEntityType(), + secretUrn, + Set.of(SECRET_VALUE_ASPECT_NAME)); + if (!entityClient.exists(context.getOperationContext(), secretUrn) + || response == null) { + throw new IllegalArgumentException( + String.format("Secret for urn %s doesn't exists!", secretUrn)); + } + + DataHubSecretValue updatedVal = + DataHubSecretValueMapper.map( + response, + input.getName(), + secretService.encrypt(input.getValue()), + input.getDescription(), + null); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + secretUrn, SECRET_VALUE_ASPECT_NAME, updatedVal); + return entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update a secret with urn %s and name %s", + secretUrn, input.getName()), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java index 38050331318ca8..4d693f2acab308 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java @@ -2,6 +2,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils; import com.linkedin.entity.client.EntityClient; @@ -9,10 +10,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - /** - * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires MANAGE_INGESTION - * privilege. + * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires + * MANAGE_INGESTION privilege. 
*/ public class DeleteIngestionSourceResolver implements DataFetcher> { @@ -28,15 +28,23 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageIngestion(context)) { final String ingestionSourceUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(ingestionSourceUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return ingestionSourceUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against ingestion source with urn %s", ingestionSourceUrn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(context.getOperationContext(), urn); + return ingestionSourceUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against ingestion source with urn %s", + ingestionSourceUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java index 562d06b79d2c79..3dcbf7b1f7b8e9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -19,9 +20,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -/** - * Gets a particular Ingestion Source by urn. - */ +/** Gets a particular Ingestion Source by urn. 
*/ @Slf4j public class GetIngestionSourceResolver implements DataFetcher> { @@ -32,31 +31,39 @@ public GetIngestionSourceResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = Urn.createFromString(urnStr); - final Map entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No ingestion source found - throw new DataHubGraphQLException(String.format("Failed to find Ingestion Source with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Ingestion source found - return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve ingestion source", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Urn urn = Urn.createFromString(urnStr); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)); + if (!entities.containsKey(urn)) { + // No ingestion source found + throw new DataHubGraphQLException( + String.format("Failed to find Ingestion Source with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Ingestion source found + return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve 
ingestion source", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index d019473606e58e..1a2806224e4a92 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListIngestionSourcesInput; @@ -12,7 +15,6 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -26,12 +28,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * 
Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. - */ -public class ListIngestionSourcesResolver implements DataFetcher> { +/** Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. */ +public class ListIngestionSourcesResolver + implements DataFetcher> { private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,57 +43,80 @@ public ListIngestionSourcesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { - final ListIngestionSourcesInput input = bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); + final ListIngestionSourcesInput input = + bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + final List filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all ingestion sources Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.INGESTION_SOURCE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - null, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, get all ingestion sources Urns. 
+ final SearchResult gmsResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), + Constants.INGESTION_SOURCE_ENTITY_NAME, + query, + buildFilter( + filters, + Collections.emptyList(), + context.getOperationContext().getAspectRetriever()), + null, + start, + count); - // Then, resolve all ingestion sources - final Map entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), - context.getAuthentication()); + // Then, resolve all ingestion sources + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)); - final Collection sortedEntities = entities.values() - .stream() - .sorted(Comparator.comparingLong(s -> -s.getAspects().get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME).getCreated().getTime())) - .collect(Collectors.toList()); + final Collection sortedEntities = + entities.values().stream() + .sorted( + Comparator.comparingLong( + s -> + -s.getAspects() + .get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME) + .getCreated() + .getTime())) + .collect(Collectors.toList()); - // Now that we have entities we can bind this to a result. 
- final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setIngestionSources(IngestionResolverUtils.mapIngestionSources(sortedEntities)); - return result; + // Now that we have entities we can bind this to a result. + final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setIngestionSources( + IngestionResolverUtils.mapIngestionSources(sortedEntities)); + return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list ingestion sources", e); - } - }); + } catch (Exception e) { + throw new RuntimeException("Failed to list ingestion sources", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java index 68e334bd976f8e..12266db05b6d10 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java @@ -1,8 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -19,23 +24,19 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Map; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; +import java.time.DateTimeException; +import java.time.ZoneId; +import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.springframework.scheduling.support.CronExpression; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. - */ +/** Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. */ @Slf4j public class UpsertIngestionSourceResolver implements DataFetcher> { @@ -49,46 +50,58 @@ public UpsertIngestionSourceResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final Optional ingestionSourceUrn = Optional.ofNullable(environment.getArgument("urn")); - final UpdateIngestionSourceInput input = bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + final Optional ingestionSourceUrn = Optional.ofNullable(environment.getArgument("urn")); + final UpdateIngestionSourceInput input = + bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); + + // Create the policy info. 
+ final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); + final MetadataChangeProposal proposal; + if (ingestionSourceUrn.isPresent()) { + // Update existing ingestion source + try { + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(ingestionSourceUrn.get()), INGESTION_INFO_ASPECT_NAME, info); + } catch (URISyntaxException e) { + throw new DataHubGraphQLException( + String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } else { + // Create new ingestion source + // Since we are creating a new Ingestion Source, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); + key.setId(uuidStr); + proposal = + buildMetadataChangeProposalWithKey( + key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); + } - // Create the policy info. - final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); - final MetadataChangeProposal proposal; - if (ingestionSourceUrn.isPresent()) { - // Update existing ingestion source + return GraphQLConcurrencyUtils.supplyAsync( + () -> { try { - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(ingestionSourceUrn.get()), INGESTION_INFO_ASPECT_NAME, info); - } catch (URISyntaxException e) { - throw new DataHubGraphQLException( - String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), - DataHubGraphQLErrorCode.BAD_REQUEST); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform update against ingestion source with urn %s", + input.toString()), + e); } - } else { - // Create new ingestion source - // Since we are creating a new Ingestion Source, we need to generate a unique UUID. 
- final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); - key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); - } - - try { - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against ingestion source with urn %s", input.toString()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + }, + this.getClass().getSimpleName(), + "get"); } - private DataHubIngestionSourceInfo mapIngestionSourceInfo(final UpdateIngestionSourceInput input) { + private DataHubIngestionSourceInfo mapIngestionSourceInfo( + final UpdateIngestionSourceInput input) { final DataHubIngestionSourceInfo result = new DataHubIngestionSourceInfo(); result.setType(input.getType()); result.setName(input.getName()); @@ -113,18 +126,49 @@ private DataHubIngestionSourceConfig mapConfig(final UpdateIngestionSourceConfig result.setDebugMode(input.getDebugMode()); } if (input.getExtraArgs() != null) { - Map extraArgs = input.getExtraArgs() - .stream() - .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + Map extraArgs = + input.getExtraArgs().stream() + .collect( + Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); result.setExtraArgs(new StringMap(extraArgs)); } return result; } - private DataHubIngestionSourceSchedule mapSchedule(final UpdateIngestionSourceScheduleInput input) { + private DataHubIngestionSourceSchedule mapSchedule( + final UpdateIngestionSourceScheduleInput input) { + + final String modifiedCronInterval = adjustCronInterval(input.getInterval()); + try { + CronExpression.parse(modifiedCronInterval); + } catch (IllegalArgumentException 
e) { + throw new DataHubGraphQLException( + String.format("Invalid cron schedule `%s`: %s", input.getInterval(), e.getMessage()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + try { + ZoneId.of(input.getTimezone()); + } catch (DateTimeException e) { + throw new DataHubGraphQLException( + String.format("Invalid timezone `%s`: %s", input.getTimezone(), e.getMessage()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + final DataHubIngestionSourceSchedule result = new DataHubIngestionSourceSchedule(); result.setInterval(input.getInterval()); result.setTimezone(input.getTimezone()); return result; } + + // Copied from IngestionScheduler.java + private String adjustCronInterval(final String origCronInterval) { + Objects.requireNonNull(origCronInterval, "origCronInterval must not be null"); + // Typically we support 5-character cron. Spring's lib only supports 6 character cron so we make + // an adjustment here. + final String[] originalCronParts = origCronInterval.split(" "); + if (originalCronParts.length == 5) { + return String.format("0 %s", origCronInterval); + } + return origCronInterval; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index ea61b5e258d8bb..09039e530631d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.DataProcessInstanceResult; import 
com.linkedin.datahub.graphql.generated.Entity; @@ -25,6 +26,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -32,10 +34,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -/** - * GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job - */ -public class DataJobRunsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job */ +public class DataJobRunsResolver + implements DataFetcher> { private static final String PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME = "parentTemplate"; private static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; @@ -48,81 +49,85 @@ public DataJobRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. 
+ final Filter filter = buildTaskRunsEntityFilter(entityUrn); + final List sortCriteria = buildTaskRunsSortCriteria(); + final SearchResult gmsResult = + _entityClient.filter( + context.getOperationContext(), + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriteria, + start, + count); + final List dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. - final Filter filter = buildTaskRunsEntityFilter(entityUrn); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null); - // Step 2: Hydrate the incident entities - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 3: Map GMS incident model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(p -> DataProcessInstanceMapper.map(context, p)) + .collect(Collectors.toList()); - // Step 3: Map GMS incident model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn 
urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); - - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); } private Filter buildTaskRunsEntityFilter(final String entityUrn) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } - private SortCriterion buildTaskRunsSortCriterion() { + 
private List buildTaskRunsSortCriteria() { final SortCriterion sortCriterion = new SortCriterion(); sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); sortCriterion.setOrder(SortOrder.DESCENDING); - return sortCriterion; + return Collections.singletonList(sortCriterion); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 3ecf396f808b38..82c5b73d871525 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.DataProcessInstanceResult; import com.linkedin.datahub.graphql.generated.Entity; @@ -26,6 +27,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.ArrayList; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -33,11 +35,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of task runs associated with a Dataset. - */ -public class EntityRunsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching the list of task runs associated with a Dataset. 
*/ +public class EntityRunsResolver + implements DataFetcher> { private static final String INPUT_FIELD_NAME = "inputs.keyword"; private static final String OUTPUT_FIELD_NAME = "outputs.keyword"; @@ -51,83 +51,93 @@ public EntityRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); - final RelationshipDirection direction = RelationshipDirection.valueOf(environment.getArgumentOrDefault("direction", - RelationshipDirection.INCOMING.toString())); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final RelationshipDirection direction = + RelationshipDirection.valueOf( + environment.getArgumentOrDefault( + "direction", RelationshipDirection.INCOMING.toString())); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. 
- final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. + final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); + final List sortCriteria = buildTaskRunsSortCriteria(); + final SearchResult gmsResult = + _entityClient.filter( + context.getOperationContext(), + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriteria, + start, + count); + final List dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the incident entities - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null); - // Step 3: Map GMS instance model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS instance model to GraphQL model + final List gmsResults = new ArrayList<>(); + 
for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(p -> DataProcessInstanceMapper.map(context, p)) + .collect(Collectors.toList()); - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private Filter buildTaskRunsEntityFilter(final String entityUrn, final RelationshipDirection direction) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(direction.equals(RelationshipDirection.INCOMING) ? INPUT_FIELD_NAME : OUTPUT_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + private Filter buildTaskRunsEntityFilter( + final String entityUrn, final RelationshipDirection direction) { + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField( + direction.equals(RelationshipDirection.INCOMING) + ? 
INPUT_FIELD_NAME + : OUTPUT_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } - private SortCriterion buildTaskRunsSortCriterion() { + private List buildTaskRunsSortCriteria() { final SortCriterion sortCriterion = new SortCriterion(); sortCriterion.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); sortCriterion.setOrder(SortOrder.DESCENDING); - return sortCriterion; + return Collections.singletonList(sortCriterion); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index 8fc3a609006626..d462fb0820aa03 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,27 +1,25 @@ package com.linkedin.datahub.graphql.resolvers.lineage; -import com.datahub.authorization.ConjunctivePrivilegeGroup; +import static com.datahub.authorization.AuthUtil.buildDisjunctivePrivilegeGroup; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; + import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.LineageEdge; import com.linkedin.datahub.graphql.generated.UpdateLineageInput; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -29,25 +27,28 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor public class UpdateLineageResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final LineageService _lineageService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - final UpdateLineageInput input = bindArgument(environment.getArgument("input"), UpdateLineageInput.class); + final UpdateLineageInput input = + bindArgument(environment.getArgument("input"), UpdateLineageInput.class); final List edgesToAdd = input.getEdgesToAdd(); final List edgesToRemove = input.getEdgesToRemove(); - // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage for each entity + // loop over 
edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage + // for each entity checkPrivileges(context, edgesToAdd, edgesToRemove); // organize data to make updating lineage cleaner @@ -57,77 +58,126 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw downstreamUrns.addAll(downstreamToUpstreamsToAdd.keySet()); downstreamUrns.addAll(downstreamToUpstreamsToRemove.keySet()); - return CompletableFuture.supplyAsync(() -> { - // build MCP for every downstreamUrn - for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); - } - - final List upstreamUrnsToAdd = downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); - final List upstreamUrnsToRemove = downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); - try { - switch (downstreamUrn.getEntityType()) { - case Constants.DATASET_ENTITY_NAME: - // need to filter out dataJobs since this is a valid lineage edge, but will be handled in the downstream direction for DataJobInputOutputs - final List filteredUpstreamUrnsToAdd = filterOutDataJobUrns(upstreamUrnsToAdd); - final List filteredUpstreamUrnsToRemove = filterOutDataJobUrns(upstreamUrnsToRemove); - - _lineageService.updateDatasetLineage(downstreamUrn, filteredUpstreamUrnsToAdd, filteredUpstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.CHART_ENTITY_NAME: - _lineageService.updateChartLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DASHBOARD_ENTITY_NAME: - _lineageService.updateDashboardLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DATA_JOB_ENTITY_NAME: - _lineageService.updateDataJobUpstreamLineage(downstreamUrn, upstreamUrnsToAdd, 
upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - default: + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Set existingDownstreamUrns = + _entityService.exists(context.getOperationContext(), downstreamUrns, true); + + // build MCP for every downstreamUrn + for (Urn downstreamUrn : downstreamUrns) { + if (!existingDownstreamUrns.contains(downstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); + } + + final List upstreamUrnsToAdd = + downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); + final List upstreamUrnsToRemove = + downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); + try { + switch (downstreamUrn.getEntityType()) { + case Constants.DATASET_ENTITY_NAME: + // need to filter out dataJobs since this is a valid lineage edge, but will be + // handled in the downstream direction for DataJobInputOutputs + final List filteredUpstreamUrnsToAdd = + filterOutDataJobUrns(upstreamUrnsToAdd); + final List filteredUpstreamUrnsToRemove = + filterOutDataJobUrns(upstreamUrnsToRemove); + + _lineageService.updateDatasetLineage( + context.getOperationContext(), + downstreamUrn, + filteredUpstreamUrnsToAdd, + filteredUpstreamUrnsToRemove, + actor); + break; + case Constants.CHART_ENTITY_NAME: + _lineageService.updateChartLineage( + context.getOperationContext(), + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor); + break; + case Constants.DASHBOARD_ENTITY_NAME: + _lineageService.updateDashboardLineage( + context.getOperationContext(), + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor); + break; + case Constants.DATA_JOB_ENTITY_NAME: + _lineageService.updateDataJobUpstreamLineage( + context.getOperationContext(), + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor); + break; + default: + } + } catch (Exception e) { + throw new RuntimeException( + 
String.format("Failed to update lineage for urn %s", downstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", downstreamUrn), e); - } - } - - Map> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); - Map> upstreamToDownstreamsToRemove = getUpstreamToDownstreamMap(edgesToRemove); - Set upstreamUrns = new HashSet<>(); - upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); - upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); - - // build MCP for upstreamUrn if necessary - for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); - } - - final List downstreamUrnsToAdd = upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); - final List downstreamUrnsToRemove = upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); - try { - if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - // need to filter out dataJobs since this is a valid lineage edge, but is handled in the upstream direction for DataJobs - final List filteredDownstreamUrnsToAdd = filterOutDataJobUrns(downstreamUrnsToAdd); - final List filteredDownstreamUrnsToRemove = filterOutDataJobUrns(downstreamUrnsToRemove); - - _lineageService.updateDataJobDownstreamLineage( - upstreamUrn, filteredDownstreamUrnsToAdd, filteredDownstreamUrnsToRemove, actor, context.getAuthentication() - ); + + Map> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); + Map> upstreamToDownstreamsToRemove = + getUpstreamToDownstreamMap(edgesToRemove); + Set upstreamUrns = new HashSet<>(); + upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); + upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + + final Set existingUpstreamUrns = + _entityService.exists(context.getOperationContext(), 
upstreamUrns, true); + + // build MCP for upstreamUrn if necessary + for (Urn upstreamUrn : upstreamUrns) { + if (!existingUpstreamUrns.contains(upstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); + } + + final List downstreamUrnsToAdd = + upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); + final List downstreamUrnsToRemove = + upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); + try { + if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + // need to filter out dataJobs since this is a valid lineage edge, but is handled in + // the upstream direction for DataJobs + final List filteredDownstreamUrnsToAdd = + filterOutDataJobUrns(downstreamUrnsToAdd); + final List filteredDownstreamUrnsToRemove = + filterOutDataJobUrns(downstreamUrnsToRemove); + + _lineageService.updateDataJobDownstreamLineage( + context.getOperationContext(), + upstreamUrn, + filteredDownstreamUrnsToAdd, + filteredDownstreamUrnsToRemove, + actor); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", upstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", upstreamUrn), e); - } - } - return true; - }); + return true; + }, + this.getClass().getSimpleName(), + "get"); } private List filterOutDataJobUrns(@Nonnull final List urns) { - return urns.stream().filter( - upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME) - ).collect(Collectors.toList()); + return urns.stream() + .filter(upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); } private Map> getDownstreamToUpstreamsMap(@Nonnull final List edges) { @@ -156,7 +206,10 @@ private Map> getUpstreamToDownstreamMap(@Nonnull final List edgesToAdd, - @Nonnull final 
List edgesToRemove - ) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup editLineagePrivileges = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + @Nonnull final List edgesToRemove) { + + DisjunctivePrivilegeGroup editLineagePrivileges = + buildDisjunctivePrivilegeGroup(LINEAGE, UPDATE, null); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java index 023686b1d10c99..7f031cb4818529 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java @@ -8,21 +8,19 @@ import java.util.concurrent.CompletableFuture; import org.dataloader.DataLoader; - /** * Generic GraphQL resolver responsible for * - * 1. Generating a single input AspectLoadKey. - * 2. Resolving a single {@link Aspect}. - * + *

1. Generating a single input AspectLoadKey. 2. Resolving a single {@link Aspect}. */ public class AspectResolver implements DataFetcher> { - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader("Aspect"); - final String fieldName = environment.getField().getName(); - final Long version = environment.getArgument("version"); - final String urn = ((Entity) environment.getSource()).getUrn(); - return loader.load(new VersionedAspectKey(urn, fieldName, version)); - } + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader("Aspect"); + final String fieldName = environment.getField().getName(); + final Long version = environment.getArgument("version"); + final String urn = ((Entity) environment.getSource()).getUrn(); + return loader.load(new VersionedAspectKey(urn, fieldName, version)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index 20e0e4ae1c22a9..2519d91aa3a84d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -3,17 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; +import graphql.execution.DataFetcherResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import 
java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +@Slf4j public class BatchGetEntitiesResolver implements DataFetcher>> { private final List> _entityTypes; @@ -21,8 +24,7 @@ public class BatchGetEntitiesResolver implements DataFetcher> entityTypes, - final Function> entitiesProvider - ) { + final Function> entitiesProvider) { _entityTypes = entityTypes; _entitiesProvider = entitiesProvider; } @@ -32,22 +34,59 @@ public CompletableFuture> get(DataFetchingEnvironment environment) final List entities = _entitiesProvider.apply(environment); Map> entityTypeToEntities = new HashMap<>(); - entities.forEach((entity) -> { + Map> entityIndexMap = new HashMap<>(); + int index = 0; + for (Entity entity : entities) { + List indexList = new ArrayList<>(); + if (entityIndexMap.containsKey(entity.getUrn())) { + indexList = entityIndexMap.get(entity.getUrn()); + } + indexList.add(index); + entityIndexMap.put(entity.getUrn(), indexList); + index++; EntityType type = entity.getType(); List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); entitiesList.add(entity); entityTypeToEntities.put(type, entitiesList); - }); + } List>> entitiesFutures = new ArrayList<>(); for (Map.Entry> entry : entityTypeToEntities.entrySet()) { - CompletableFuture> entitiesFuture = BatchLoadUtils - .batchLoadEntitiesOfSameType(entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); + CompletableFuture> entitiesFuture = + BatchLoadUtils.batchLoadEntitiesOfSameType( + entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); entitiesFutures.add(entitiesFuture); } return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0])) - .thenApply(v -> entitiesFutures.stream().flatMap(future -> future.join().stream()).collect(Collectors.toList())); + .thenApply( + v -> { + Entity[] finalEntityList = new Entity[entities.size()]; + // Returned objects can 
be either of type Entity or wrapped as + // DataFetcherResult + // Therefore we need to be working with raw Objects in this area of the code + List returnedList = + entitiesFutures.stream() + .flatMap(future -> future.join().stream()) + .collect(Collectors.toList()); + for (Object element : returnedList) { + Entity entity = null; + if (element instanceof DataFetcherResult) { + entity = ((DataFetcherResult) element).getData(); + } else if (element instanceof Entity) { + entity = (Entity) element; + } else { + throw new RuntimeException( + String.format( + "Cannot process entity because it is neither an Entity not a DataFetcherResult. %s", + element)); + } + for (int idx : entityIndexMap.get(entity.getUrn())) { + finalEntityList[idx] = entity; + } + } + return Arrays.asList(finalEntityList); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index d44f2b77029f35..d872ffad2783db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -1,98 +1,155 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; + +import com.datahub.authorization.AuthorizationConfiguration; +import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import 
com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.LineageInput; import com.linkedin.datahub.graphql.generated.LineageRelationship; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.graph.SiblingGraphService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.net.URISyntaxException; +import io.datahubproject.metadata.services.RestrictedService; import java.util.HashSet; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub + * graph. 
Lineage relationship denotes whether an entity is directly upstream or downstream of + * another entity */ @Slf4j -public class EntityLineageResultResolver implements DataFetcher> { +public class EntityLineageResultResolver + implements DataFetcher> { private final SiblingGraphService _siblingGraphService; + private final RestrictedService _restrictedService; + private final AuthorizationConfiguration _authorizationConfiguration; - public EntityLineageResultResolver(final SiblingGraphService siblingGraphService) { + public EntityLineageResultResolver( + final SiblingGraphService siblingGraphService, + final RestrictedService restrictedService, + final AuthorizationConfiguration authorizationConfiguration) { _siblingGraphService = siblingGraphService; + _restrictedService = restrictedService; + _authorizationConfiguration = authorizationConfiguration; } @Override public CompletableFuture get(DataFetchingEnvironment environment) { - final String urn = ((Entity) environment.getSource()).getUrn(); + final QueryContext context = environment.getContext(); + Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); final LineageDirection lineageDirection = input.getDirection(); + @Nullable final Integer start = input.getStart(); // Optional! + @Nullable final Integer count = input.getCount(); // Optional! + @Nullable final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! + @Nullable final Long startTimeMillis = input.getStartTimeMillis(); // Optional! @Nullable - final Integer start = input.getStart(); // Optional! - @Nullable - final Integer count = input.getCount(); // Optional! - @Nullable - final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! - @Nullable - final Long startTimeMillis = input.getStartTimeMillis(); // Optional! - @Nullable - final Long endTimeMillis = input.getEndTimeMillis(); // Optional! 
+ final Long endTimeMillis = + ResolverUtils.getLineageEndTimeMillis( + input.getStartTimeMillis(), input.getEndTimeMillis()); // Optional! com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - return mapEntityRelationships( - _siblingGraphService.getLineage( - Urn.createFromString(urn), - resolvedDirection, - start != null ? start : 0, - count != null ? count : 100, - 1, - separateSiblings != null ? input.getSeparateSiblings() : false, - new HashSet<>(), - startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); - } - }); + final Urn finalUrn = urn; + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + com.linkedin.metadata.graph.EntityLineageResult entityLineageResult = + _siblingGraphService.getLineage( + context + .getOperationContext() + .withLineageFlags( + flags -> + flags + .setStartTimeMillis(startTimeMillis, SetMode.REMOVE_IF_NULL) + .setEndTimeMillis(endTimeMillis, SetMode.REMOVE_IF_NULL)), + finalUrn, + resolvedDirection, + start != null ? start : 0, + count != null ? count : 100, + 1, + separateSiblings != null ? 
input.getSeparateSiblings() : false, + new HashSet<>()); + + Set restrictedUrns = new HashSet<>(); + entityLineageResult + .getRelationships() + .forEach( + rel -> { + if (_authorizationConfiguration.getView().isEnabled() + && !AuthorizationUtils.canViewRelationship( + context.getOperationContext(), rel.getEntity(), urn)) { + restrictedUrns.add(rel.getEntity()); + } + }); + + return mapEntityRelationships(context, entityLineageResult, restrictedUrns); + } catch (Exception e) { + log.error("Failed to fetch lineage for {}", finalUrn); + throw new RuntimeException( + String.format("Failed to fetch lineage for %s", finalUrn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } private EntityLineageResult mapEntityRelationships( - final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult) { + @Nullable final QueryContext context, + final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult, + final Set restrictedUrns) { final EntityLineageResult result = new EntityLineageResult(); result.setStart(entityLineageResult.getStart()); result.setCount(entityLineageResult.getCount()); result.setTotal(entityLineageResult.getTotal()); result.setFiltered(entityLineageResult.getFiltered()); - result.setRelationships(entityLineageResult.getRelationships() - .stream() - .map(this::mapEntityRelationship) - .collect(Collectors.toList())); + result.setRelationships( + entityLineageResult.getRelationships().stream() + .map(r -> mapEntityRelationship(context, r, restrictedUrns)) + .collect(Collectors.toList())); return result; } private LineageRelationship mapEntityRelationship( - final com.linkedin.metadata.graph.LineageRelationship lineageRelationship) { + @Nullable final QueryContext context, + final com.linkedin.metadata.graph.LineageRelationship lineageRelationship, + final Set restrictedUrns) { final LineageRelationship result = new LineageRelationship(); - final Entity partialEntity = UrnToEntityMapper.map(lineageRelationship.getEntity()); - 
if (partialEntity != null) { - result.setEntity(partialEntity); + if (restrictedUrns.contains(lineageRelationship.getEntity())) { + final Restricted restrictedEntity = new Restricted(); + restrictedEntity.setType(EntityType.RESTRICTED); + String restrictedUrnString = + _restrictedService.encryptRestrictedUrn(lineageRelationship.getEntity()).toString(); + + restrictedEntity.setUrn(restrictedUrnString); + result.setEntity(restrictedEntity); + } else { + final Entity partialEntity = UrnToEntityMapper.map(context, lineageRelationship.getEntity()); + if (partialEntity != null) { + result.setEntity(partialEntity); + } } result.setType(lineageRelationship.getType()); result.setDegree(lineageRelationship.getDegree()); @@ -101,16 +158,21 @@ private LineageRelationship mapEntityRelationship( } if (lineageRelationship.hasCreatedActor()) { final Urn createdActor = lineageRelationship.getCreatedActor(); - result.setCreatedActor(UrnToEntityMapper.map(createdActor)); + result.setCreatedActor(UrnToEntityMapper.map(context, createdActor)); } if (lineageRelationship.hasUpdatedOn()) { result.setUpdatedOn(lineageRelationship.getUpdatedOn()); } if (lineageRelationship.hasUpdatedActor()) { final Urn updatedActor = lineageRelationship.getUpdatedActor(); - result.setUpdatedActor(UrnToEntityMapper.map(updatedActor)); + result.setUpdatedActor(UrnToEntityMapper.map(context, updatedActor)); } result.setIsManual(lineageRelationship.hasIsManual() && lineageRelationship.isIsManual()); + if (lineageRelationship.getPaths() != null) { + UrnArrayArray paths = lineageRelationship.getPaths(); + result.setPaths( + paths.stream().map(path -> mapPath(context, path)).collect(Collectors.toList())); + } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 43b28ef85f78ae..fd72edb2972e36 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,58 +1,75 @@ package com.linkedin.datahub.graphql.resolvers.load; -import com.linkedin.common.EntityRelationship; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityRelationshipsResult; import com.linkedin.datahub.graphql.generated.RelationshipsInput; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphClient; import com.linkedin.metadata.query.filter.RelationshipDirection; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - +import javax.annotation.Nullable; /** * GraphQL Resolver responsible for fetching relationships between entities in the DataHub graph. 
*/ -public class EntityRelationshipsResultResolver implements DataFetcher> { +public class EntityRelationshipsResultResolver + implements DataFetcher> { private final GraphClient _graphClient; + private final EntityService _entityService; + public EntityRelationshipsResultResolver(final GraphClient graphClient) { + this(graphClient, null); + } + + public EntityRelationshipsResultResolver( + final GraphClient graphClient, final EntityService entityService) { _graphClient = graphClient; + _entityService = entityService; } @Override public CompletableFuture get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - final RelationshipsInput input = bindArgument(environment.getArgument("input"), RelationshipsInput.class); + final RelationshipsInput input = + bindArgument(environment.getArgument("input"), RelationshipsInput.class); final List relationshipTypes = input.getTypes(); - final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = input.getDirection(); + final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = + input.getDirection(); final Integer start = input.getStart(); // Optional! final Integer count = input.getCount(); // Optional! 
- final RelationshipDirection resolvedDirection = RelationshipDirection.valueOf(relationshipDirection.toString()); - return CompletableFuture.supplyAsync(() -> mapEntityRelationships( - fetchEntityRelationships( - urn, - relationshipTypes, - resolvedDirection, - start, - count, - context.getActorUrn() - ), - resolvedDirection - )); + final RelationshipDirection resolvedDirection = + RelationshipDirection.valueOf(relationshipDirection.toString()); + final boolean includeSoftDelete = input.getIncludeSoftDelete(); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> + mapEntityRelationships( + context, + fetchEntityRelationships( + urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), + resolvedDirection, + includeSoftDelete), + this.getClass().getSimpleName(), + "get"); } private EntityRelationships fetchEntityRelationships( @@ -67,32 +84,64 @@ private EntityRelationships fetchEntityRelationships( } private EntityRelationshipsResult mapEntityRelationships( + @Nullable final QueryContext context, final EntityRelationships entityRelationships, - final RelationshipDirection relationshipDirection - ) { + final RelationshipDirection relationshipDirection, + final boolean includeSoftDelete) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); + + final Set existentUrns; + if (context != null && _entityService != null && !includeSoftDelete) { + Set allRelatedUrns = + entityRelationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toSet()); + existentUrns = _entityService.exists(context.getOperationContext(), allRelatedUrns, false); + } else { + existentUrns = null; + } + + List viewable = + entityRelationships.getRelationships().stream() + .filter( + rel -> + (existentUrns == null || existentUrns.contains(rel.getEntity())) + && (context == null + || canView(context.getOperationContext(), rel.getEntity()))) + .collect(Collectors.toList()); + 
result.setStart(entityRelationships.getStart()); - result.setCount(entityRelationships.getCount()); - result.setTotal(entityRelationships.getTotal()); - result.setRelationships(entityRelationships.getRelationships().stream().map(entityRelationship -> mapEntityRelationship( - com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf(relationshipDirection.name()), - entityRelationship) - ).collect(Collectors.toList())); + result.setCount(viewable.size()); + // TODO fix the calculation at the graph call + result.setTotal( + entityRelationships.getTotal() - (entityRelationships.getCount() - viewable.size())); + result.setRelationships( + viewable.stream() + .map( + entityRelationship -> + mapEntityRelationship( + context, + com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( + relationshipDirection.name()), + entityRelationship)) + .collect(Collectors.toList())); return result; } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( + @Nullable final QueryContext context, final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final EntityRelationship entityRelationship) { - final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); - final Entity partialEntity = UrnToEntityMapper.map(entityRelationship.getEntity()); + final com.linkedin.datahub.graphql.generated.EntityRelationship result = + new com.linkedin.datahub.graphql.generated.EntityRelationship(); + final Entity partialEntity = UrnToEntityMapper.map(context, entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); } result.setType(entityRelationship.getType()); result.setDirection(direction); if (entityRelationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(entityRelationship.getCreated())); + result.setCreated(AuditStampMapper.map(context, entityRelationship.getCreated())); } return result; } 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java index 6a32e0b14e3130..d298c344240c74 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java @@ -8,31 +8,27 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; - /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + *

1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeBatchResolver implements DataFetcher>> { - private final List> _entityTypes; - private final Function> _entitiesProvider; + private final List> _entityTypes; + private final Function> _entitiesProvider; - public EntityTypeBatchResolver( - final List> entityTypes, - final Function> entitiesProvider - ) { - _entityTypes = entityTypes; - _entitiesProvider = entitiesProvider; - } + public EntityTypeBatchResolver( + final List> entityTypes, + final Function> entitiesProvider) { + _entityTypes = entityTypes; + _entitiesProvider = entitiesProvider; + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final List entities = _entitiesProvider.apply(environment); - return BatchLoadUtils.batchLoadEntitiesOfSameType(entities, _entityTypes, environment.getDataLoaderRegistry()); - } + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final List entities = _entitiesProvider.apply(environment); + return BatchLoadUtils.batchLoadEntitiesOfSameType( + entities, _entityTypes, environment.getDataLoaderRegistry()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java index 29d5d78e0ea96a..3c285f30661bce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java @@ -5,64 +5,65 @@ import com.linkedin.datahub.graphql.generated.Entity; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import 
org.dataloader.DataLoader; /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + *

1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeResolver implements DataFetcher> { - private static final List IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); - private final List> _entityTypes; - private final Function _entityProvider; + private static final List IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); + private final List> _entityTypes; + private final Function _entityProvider; - public EntityTypeResolver( - final List> entityTypes, - final Function entity - ) { - _entityTypes = entityTypes; - _entityProvider = entity; - } + public EntityTypeResolver( + final List> entityTypes, + final Function entity) { + _entityTypes = entityTypes; + _entityProvider = entity; + } + private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { + return environment.getField().getSelectionSet().getSelections().stream() + .filter( + selection -> { + if (!(selection instanceof graphql.language.Field)) { + return true; + } + return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); + }) + .count() + == 0; + } - private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { - return environment.getField().getSelectionSet().getSelections().stream().filter(selection -> { - if (!(selection instanceof graphql.language.Field)) { - return true; - } - return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); - }).count() == 0; + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final Entity resolvedEntity = _entityProvider.apply(environment); + if (resolvedEntity == null) { + return CompletableFuture.completedFuture(null); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final Entity resolvedEntity = _entityProvider.apply(environment); - if (resolvedEntity == null) { - return CompletableFuture.completedFuture(null); - } - - final Object javaObject = 
_entityProvider.apply(environment); + final Object javaObject = _entityProvider.apply(environment); - if (isOnlySelectingIdentityFields(environment)) { - return CompletableFuture.completedFuture(javaObject); - } + if (isOnlySelectingIdentityFields(environment)) { + return CompletableFuture.completedFuture(javaObject); + } - final com.linkedin.datahub.graphql.types.EntityType filteredEntity = Iterables.getOnlyElement(_entityTypes.stream() + final com.linkedin.datahub.graphql.types.EntityType filteredEntity = + Iterables.getOnlyElement( + _entityTypes.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); - return loader.load(key); - } + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java index 02a92544855a3a..ee2f7c3abe97dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java @@ -3,41 +3,42 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. 
Retrieving a batch of urns. - * 2. Resolving a single {@link LoadableType}. + *

1. Retrieving a batch of urns. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *

Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. * @param the key type for the DataLoader */ public class LoadableTypeBatchResolver implements DataFetcher>> { - private final LoadableType _loadableType; - private final Function> _keyProvider; + private final LoadableType _loadableType; + private final Function> _keyProvider; - public LoadableTypeBatchResolver(final LoadableType loadableType, final Function> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeBatchResolver( + final LoadableType loadableType, + final Function> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final List keys = _keyProvider.apply(environment); - if (keys == null) { - return null; - } - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.loadMany(keys); + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final List keys = _keyProvider.apply(environment); + if (keys == null) { + return null; } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.loadMany(keys); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java index 53702f9cafe8b4..3868b1a35b64f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java @@ -3,40 +3,41 @@ import 
com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + *

1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *

Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. * @param the key type for the DataLoader */ public class LoadableTypeResolver implements DataFetcher> { - private final LoadableType _loadableType; - private final Function _keyProvider; + private final LoadableType _loadableType; + private final Function _keyProvider; - public LoadableTypeResolver(final LoadableType loadableType, final Function keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeResolver( + final LoadableType loadableType, + final Function keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final K key = _keyProvider.apply(environment); - if (key == null) { - return null; - } - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.load(key); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final K key = _keyProvider.apply(environment); + if (key == null) { + return null; } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java index a4867819a2401c..e85eaca127d625 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import 
com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.OwnerType; import com.linkedin.datahub.graphql.types.LoadableType; @@ -8,38 +9,41 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import org.dataloader.DataLoader; import java.util.stream.Collectors; -import com.google.common.collect.Iterables; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + *

1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *

Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. */ public class OwnerTypeResolver implements DataFetcher> { - private final List> _loadableTypes; - private final Function _urnProvider; + private final List> _loadableTypes; + private final Function _urnProvider; - public OwnerTypeResolver(final List> loadableTypes, final Function urnProvider) { - _loadableTypes = loadableTypes; - _urnProvider = urnProvider; - } + public OwnerTypeResolver( + final List> loadableTypes, + final Function urnProvider) { + _loadableTypes = loadableTypes; + _urnProvider = urnProvider; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final OwnerType ownerType = _urnProvider.apply(environment); - final LoadableType filteredEntity = Iterables.getOnlyElement(_loadableTypes.stream() + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final OwnerType ownerType = _urnProvider.apply(environment); + final LoadableType filteredEntity = + Iterables.getOnlyElement( + _loadableTypes.stream() .filter(entity -> ownerType.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - return loader.load(((Entity) ownerType).getUrn()); - } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + return loader.load(((Entity) ownerType).getUrn()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index f13ebf8373e91a..8fc26e3cec9d06 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -1,13 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.FilterInput; import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; @@ -20,43 +24,39 @@ import graphql.schema.DataFetchingEnvironment; import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; -import java.util.function.Function; +import java.util.function.BiFunction; import java.util.stream.Collectors; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. - * The purpose of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" API - * to a single place. 
- * - * It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and limit arguments - * used for filtering the specific TimeSeries Aspects to be fetched. + * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. The purpose + * of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" + * API to a single place. * - * On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping - * a generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This wil - * be invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. + *

It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and + * limit arguments used for filtering the specific TimeSeries Aspects to be fetched. * + *

On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping a + * generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This wil be + * invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. */ @Slf4j -public class TimeSeriesAspectResolver implements DataFetcher>> { +public class TimeSeriesAspectResolver + implements DataFetcher>> { private final EntityClient _client; private final String _entityName; private final String _aspectName; - private final Function _aspectMapper; + private final BiFunction _aspectMapper; private final SortCriterion _sort; public TimeSeriesAspectResolver( final EntityClient client, final String entityName, final String aspectName, - final Function aspectMapper) { + final BiFunction aspectMapper) { this(client, entityName, aspectName, aspectMapper, null); } @@ -64,7 +64,7 @@ public TimeSeriesAspectResolver( final EntityClient client, final String entityName, final String aspectName, - final Function aspectMapper, + final BiFunction aspectMapper, final SortCriterion sort) { _client = client; _entityName = entityName; @@ -73,60 +73,82 @@ public TimeSeriesAspectResolver( _sort = sort; } - /** - * Check whether the actor is authorized to fetch the timeseries aspect given the resource urn - */ + /** Check whether the actor is authorized to fetch the timeseries aspect given the resource urn */ private boolean isAuthorized(QueryContext context, String urn) { - if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals( - Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized(context, Optional.of(new EntitySpec(_entityName, urn)), - PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); + if (_entityName.equals(Constants.DATASET_ENTITY_NAME) + && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + 
PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE, + new EntitySpec(_entityName, urn)); } return true; } @Override public CompletableFuture> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); - // Fetch the urn, assuming the parent has an urn field. - // todo: what if the parent urn isn't projected? - final String urn = ((Entity) environment.getSource()).getUrn(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + // Fetch the urn, assuming the parent has an urn field. + // todo: what if the parent urn isn't projected? + final String urn = ((Entity) environment.getSource()).getUrn(); - if (!isAuthorized(context, urn)) { - return Collections.emptyList(); - } + if (!isAuthorized(context, urn)) { + return Collections.emptyList(); + } - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - // Max number of aspects to return. - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; - final SortCriterion maybeSort = _sort; + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + // Max number of aspects to return. + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; + final SortCriterion maybeSort = _sort; - try { - // Step 1: Get aspects. 
- List aspects = - _client.getTimeseriesAspectValues(urn, _entityName, _aspectName, maybeStartTimeMillis, maybeEndTimeMillis, - maybeLimit, buildFilters(maybeFilters), maybeSort, context.getAuthentication()); + try { + // Step 1: Get aspects. + List aspects = + _client.getTimeseriesAspectValues( + context.getOperationContext(), + urn, + _entityName, + _aspectName, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilters(maybeFilters, context.getOperationContext().getAspectRetriever()), + maybeSort); - // Step 2: Bind profiles into GraphQL strong types. - return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve aspects from GMS", e); - } - }); + // Step 2: Bind profiles into GraphQL strong types. + return aspects.stream() + .map(a -> _aspectMapper.apply(context, a)) + .collect(Collectors.toList()); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve aspects from GMS", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private Filter buildFilters(@Nullable FilterInput maybeFilters) { + private Filter buildFilters( + @Nullable FilterInput maybeFilters, @Nullable AspectRetriever aspectRetriever) { if (maybeFilters == null) { return null; } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(maybeFilters.getAnd().stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + maybeFilters.getAnd().stream() + .map(filter -> criterionFromFilter(filter, true, aspectRetriever)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java index 619ca95e7d9eda..5cffcd9c35c005 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.linkedin.common.urn.CorpuserUrn; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AddLinkInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; @@ -17,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddLinkResolver implements DataFetcher> { @@ -29,47 +28,56 @@ public class AddLinkResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final AddLinkInput input = bindArgument(environment.getArgument("input"), AddLinkInput.class); String linkUrl = input.getLinkUrl(); String linkLabel = input.getLabel(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + if (!LinkUtils.isAuthorizedToUpdateLinks(context, targetUrn) + && !canUpdateGlossaryEntityLinks(targetUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput( + context.getOperationContext(), linkUrl, targetUrn, _entityService); + try { - log.debug("Adding Link. input: {}", input.toString()); + log.debug("Adding Link. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.addLink( - linkUrl, - linkLabel, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add link to resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to add link to resource with input %s", input.toString()), e); - } - }); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LinkUtils.addLink( + context.getOperationContext(), + linkUrl, + linkLabel, + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to add link to resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to add link to resource with input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - // Returns whether this is a glossary entity and whether you can edit this glossary entity with the + // Returns whether this is a glossary entity and whether you can edit this glossary entity with + // the // Manage all children or Manage direct children privileges private boolean canUpdateGlossaryEntityLinks(Urn 
targetUrn, QueryContext context) { - final boolean isGlossaryEntity = targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); + final boolean isGlossaryEntity = + targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); if (!isGlossaryEntity) { return false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java index 3f2dab0a5ba711..7c0f7b3757ee9c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AddOwnerInput; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnerResolver implements DataFetcher> { @@ -27,6 +26,7 @@ public class AddOwnerResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + 
final QueryContext context = environment.getContext(); final AddOwnerInput input = bindArgument(environment.getArgument("input"), AddOwnerInput.class); Urn ownerUrn = Urn.createFromString(input.getOwnerUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); @@ -41,29 +41,32 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw } OwnerInput ownerInput = ownerInputBuilder.build(); - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + OwnerUtils.validateAuthorizedToUpdateOwners(context, targetUrn); - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + OwnerUtils.validateAddOwnerInput( + context.getOperationContext(), ownerInput, ownerUrn, _entityService); - try { + try { - log.debug("Adding Owner. input: {}", input); + log.debug("Adding Owner. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - ImmutableList.of(ownerInput), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owner to resource with input %s", input), e); - } - }); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + OwnerUtils.addOwnersToResources( + context.getOperationContext(), + ImmutableList.of(ownerInput), + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owner to resource with input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java index 4e5b5bdb2a651d..ade4e7b744801f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java @@ -1,11 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; 
+import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AddOwnersInput; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -18,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnersResolver implements DataFetcher> { @@ -29,37 +27,37 @@ public class AddOwnersResolver implements DataFetcher @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final AddOwnersInput input = bindArgument(environment.getArgument("input"), AddOwnersInput.class); + final QueryContext context = environment.getContext(); + final AddOwnersInput input = + bindArgument(environment.getArgument("input"), AddOwnersInput.class); List owners = input.getOwners(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - OwnerUtils.validateAddOwnerInput( - owners, - targetUrn, - _entityService - ); - try { - - log.debug("Adding Owners. 
input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - owners, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owners to resource with input %s", input), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + OwnerUtils.validateAuthorizedToUpdateOwners(environment.getContext(), targetUrn); + + OwnerUtils.validateAddOwnerInput( + context.getOperationContext(), owners, targetUrn, _entityService); + try { + + log.debug("Adding Owners. input: {}", input); + + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + OwnerUtils.addOwnersToResources( + context.getOperationContext(), + owners, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owners to resource with input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java index 78d2341492b398..4275f869b29086 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.generated.TagAssociationInput; @@ -17,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagResolver implements DataFetcher> { @@ -27,44 +26,57 @@ public class AddTagResolver implements DataFetcher> { @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final QueryContext context = environment.getContext(); + final TagAssociationInput input = + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + context.getOperationContext(), + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { - if (!tagUrn.getEntityType().equals("tag")) { - log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals("tag")) { + log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.info("Adding Tag. input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info("Adding Tag. 
input: {}", input.toString()); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LabelUtils.addTagsToResources( + context.getOperationContext(), + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java index 7174f3edffee67..b2fc05720064cd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AddTagsInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -20,9 +22,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class 
AddTagsResolver implements DataFetcher> { @@ -31,41 +30,50 @@ public class AddTagsResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final AddTagsInput input = bindArgument(environment.getArgument("input"), AddTagsInput.class); - List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTags(context, targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - tagUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { - log.info("Adding Tags. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - tagUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + LabelUtils.validateResourceAndLabel( + context.getOperationContext(), + tagUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { + log.info("Adding Tags. input: {}", input.toString()); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LabelUtils.addTagsToResources( + context.getOperationContext(), + tagUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java index 056b5db4324c34..ffdb394369d173 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java @@ -1,9 +1,12 @@ 
package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.generated.TermAssociationInput; @@ -16,8 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermResolver implements DataFetcher> { @@ -25,39 +26,51 @@ public class AddTermResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); + final QueryContext context = environment.getContext(); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms(context, targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + context.getOperationContext(), + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. 
input: {}", input); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LabelUtils.addTermsToResources( + context.getOperationContext(), + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java index 2f58b6b09e681b..68ba5f977c2bf0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AddTermsInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -19,8 +22,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermsResolver 
implements DataFetcher> { @@ -28,42 +29,51 @@ public class AddTermsResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final AddTermsInput input = bindArgument(environment.getArgument("input"), AddTermsInput.class); - List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTerms(context, targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - termUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + LabelUtils.validateResourceAndLabel( + context.getOperationContext(), + termUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - termUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LabelUtils.addTermsToResources( + context.getOperationContext(), + termUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java index 5beaeecae673f0..28daef1b110628 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BatchAddOwnersInput; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -12,15 +14,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddOwnersResolver implements DataFetcher> { @@ -29,61 +30,82 @@ public class BatchAddOwnersResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final BatchAddOwnersInput input = bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); + final BatchAddOwnersInput input = + bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); final List owners = input.getOwners(); final List resources = input.getResources(); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the batch - validateOwners(owners); - validateInputResources(resources, context); + // First, validate the batch + validateOwners(context.getOperationContext(), owners); + validateInputResources(context.getOperationContext(), resources, context); - try { - // Then execute the bulk add - batchAddOwners(owners, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to 
perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + try { + // Then execute the bulk add + batchAddOwners(context.getOperationContext(), owners, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private void validateOwners(List owners) { + private void validateOwners(@Nonnull OperationContext opContext, List owners) { for (OwnerInput ownerInput : owners) { - OwnerUtils.validateOwner(ownerInput, _entityService); + OwnerUtils.validateOwner(opContext, ownerInput, _entityService); } } - private void validateInputResources(List resources, QueryContext context) { + private void validateInputResources( + @Nonnull OperationContext opContext, List resources, QueryContext context) { for (ResourceRefInput resource : resources) { - validateInputResource(resource, context); + validateInputResource(opContext, resource, context); } } - private void validateInputResource(ResourceRefInput resource, QueryContext context) { + private void validateInputResource( + @Nonnull OperationContext opContext, ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be applied to subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be applied to subresources."); } - if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + OwnerUtils.validateAuthorizedToUpdateOwners(context, resourceUrn); + LabelUtils.validateResource( + opContext, + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchAddOwners(List owners, List resources, QueryContext context) { + private void batchAddOwners( + @Nonnull OperationContext opContext, + List owners, + List resources, + QueryContext context) { log.debug("Batch adding owners. owners: {}, resources: {}", owners, resources); try { - OwnerUtils.addOwnersToResources(owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.addOwnersToResources( + opContext, owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - owners, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + owners, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java index 9c5cddb3c50bca..7cd2483263479f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchAddTagsInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -12,21 +15,17 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTagsResolver implements DataFetcher> { @@ -36,62 +35,67 @@ public class BatchAddTagsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTagsInput input = bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); - final List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTagsInput input = + bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); + final List tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTags(tagUrns); - - if (resources.size() == 1 && 
resources.get(0).getSubResource() != null) { - return handleAddTagsToSingleSchemaField(context, resources, tagUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + + // First, validate the batch + validateTags(context.getOperationContext(), tagUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTagsToSingleSchemaField(context, resources, tagUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } /** * When adding tags to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * tag to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the tag to one of its siblings. If that fails, keep trying all siblings until one passes or all + * fail. Then we throw if none succeed. 
*/ private Boolean handleAddTagsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List resources, - @Nonnull final List tagUrns - ) { + @Nonnull final List tagUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); - final List siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, new HashSet<>(), siblingUrns); + final List siblingUrns = + SiblingsUtils.getSiblingUrns(context.getOperationContext(), resourceUrn, _entityService); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, new HashSet<>(), siblingUrns); } /** - * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. - * Try adding until we attempt all siblings or one passes. Throw if none pass. + * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. Try + * adding until we attempt all siblings or one passes. Throw if none pass. 
*/ private Boolean attemptBatchAddTagsWithSiblings( @Nonnull final List tagUrns, @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List resources = new ArrayList<>(); resources.add(resource); @@ -106,20 +110,26 @@ private Boolean attemptBatchAddTagsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add tags for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } - private void validateTags(List tagUrns) { + private void validateTags(@Nonnull OperationContext opContext, List tagUrns) { for (Urn tagUrn : tagUrns) { - LabelUtils.validateLabel(tagUrn, Constants.TAG_ENTITY_NAME, _entityService); + LabelUtils.validateLabel(opContext, tagUrn, Constants.TAG_ENTITY_NAME, _entityService); } } @@ -132,20 +142,36 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = 
UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + context.getOperationContext(), + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchAddTags(List tagUrns, List resources, QueryContext context) { - log.debug("Batch adding Tags. tags: {}, resources: {}", resources, tagUrns); - try { - LabelUtils.addTagsToResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + private void batchAddTags( + List tagUrns, List resources, QueryContext context) { + log.debug("Batch adding Tags. 
tags: {}, resources: {}", resources, tagUrns); + try { + LabelUtils.addTagsToResources( + context.getOperationContext(), + tagUrns, + resources, + UrnUtils.getUrn(context.getActorUrn()), + _entityService); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); - } + } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java index a46f37b110f4eb..2d092758b776b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchAddTermsInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -12,21 +15,17 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTermsResolver implements DataFetcher> { @@ -36,49 +35,55 @@ public class BatchAddTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTermsInput input = bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTermsInput input = + bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTerms(termUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - return handleAddTermsToSingleSchemaField(context, resources, termUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + + // First, validate the batch + validateTerms(context.getOperationContext(), termUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTermsToSingleSchemaField(context, resources, termUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + 
batchAddTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } /** * When adding terms to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * term to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the term to one of its siblings. If that fails, keep trying all siblings until one passes or + * all fail. Then we throw if none succeed. */ private Boolean handleAddTermsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List resources, - @Nonnull final List termUrns - ) { + @Nonnull final List termUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); - final List siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, new HashSet<>(), siblingUrns); + final List siblingUrns = + SiblingsUtils.getSiblingUrns(context.getOperationContext(), resourceUrn, _entityService); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, new HashSet<>(), siblingUrns); } /** @@ -90,8 +95,7 @@ private Boolean attemptBatchAddTermsWithSiblings( @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final 
List resources = new ArrayList<>(); resources.add(resource); @@ -106,20 +110,27 @@ private Boolean attemptBatchAddTermsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add terms for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } - private void validateTerms(List termUrns) { + private void validateTerms(@Nonnull OperationContext opContext, List termUrns) { for (Urn termUrn : termUrns) { - LabelUtils.validateLabel(termUrn, Constants.GLOSSARY_TERM_ENTITY_NAME, _entityService); + LabelUtils.validateLabel( + opContext, termUrn, Constants.GLOSSARY_TERM_ENTITY_NAME, _entityService); } } @@ -132,20 +143,36 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + context.getOperationContext(), + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchAddTerms(List termUrns, List resources, QueryContext context) { + private void batchAddTerms( + List termUrns, List resources, QueryContext context) { log.debug("Batch adding Terms. terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.addTermsToResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Terms %s to resources with urns %s!", + LabelUtils.addTermsToResources( + context.getOperationContext(), termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + resources, + UrnUtils.getUrn(context.getActorUrn()), + _entityService); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch add Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java index debd68646910f2..5aaace4e21e9ca 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java @@ -1,9 
+1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BatchRemoveOwnersInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.LabelUtils; @@ -12,15 +14,11 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveOwnersResolver implements DataFetcher> { @@ -29,27 +27,35 @@ public class BatchRemoveOwnersResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final BatchRemoveOwnersInput input = bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); + final BatchRemoveOwnersInput input = + bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); final List owners = input.getOwnerUrns(); final List resources = input.getResources(); - final Optional maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + final Urn ownershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? 
null + : Urn.createFromString(input.getOwnershipTypeUrn()); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk remove - batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk remove + batchRemoveOwners(owners, ownershipTypeUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } private void validateInputResources(List resources, QueryContext context) { @@ -62,26 +68,42 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new IllegalArgumentException("Malformed input provided: owners cannot be removed from subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be removed from subresources."); } - if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + OwnerUtils.validateAuthorizedToUpdateOwners(context, resourceUrn); + LabelUtils.validateResource( + context.getOperationContext(), + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchRemoveOwners(List ownerUrns, Optional maybeOwnershipTypeUrn, - List resources, QueryContext context) { + private void batchRemoveOwners( + List ownerUrns, + Urn ownershipTypeUrn, + List resources, + QueryContext context) { log.debug("Batch removing owners. owners: {}, resources: {}", ownerUrns, resources); try { - OwnerUtils.removeOwnersFromResources(ownerUrns.stream().map(UrnUtils::getUrn).collect( - Collectors.toList()), maybeOwnershipTypeUrn, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.removeOwnersFromResources( + context.getOperationContext(), + ownerUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + ownershipTypeUrn, + resources, + UrnUtils.getUrn(context.getActorUrn()), + _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch remove Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove Owners %s to resources with urns %s!", + ownerUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java index ab432f0afcaec0..6d90d07521f924 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchRemoveTagsInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -10,15 +13,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTagsResolver implements DataFetcher> { @@ -28,51 +30,73 @@ public class BatchRemoveTagsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTagsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); - final List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTagsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); + final List tagUrns = + 
input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(context.getOperationContext(), resources, context); - try { - // Then execute the bulk add - batchRemoveTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTags(context.getOperationContext(), tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private void validateInputResources(List resources, QueryContext context) { + private void validateInputResources( + @Nonnull OperationContext opContext, List resources, QueryContext context) { for (ResourceRefInput resource : resources) { - validateInputResource(resource, context); + validateInputResource(opContext, resource, context); } } - private void validateInputResource(ResourceRefInput resource, QueryContext context) { + private void validateInputResource( + @Nonnull OperationContext opContext, ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + opContext, + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchRemoveTags(List tagUrns, List resources, QueryContext context) { + private void batchRemoveTags( + @Nonnull OperationContext opContext, + List tagUrns, + List resources, + QueryContext context) { log.debug("Batch removing Tags. tags: {}, resources: {}", resources, tagUrns); try { - LabelUtils.removeTagsFromResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTagsFromResources( + opContext, tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java index c8870cc44bf9e6..adc5e77681a3ad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java @@ -1,8 +1,11 @@ package 
com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchRemoveTermsInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -10,15 +13,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTermsResolver implements DataFetcher> { @@ -28,51 +30,73 @@ public class BatchRemoveTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTermsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTermsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // 
First, validate the batch + validateInputResources(context.getOperationContext(), resources, context); - try { - // Then execute the bulk add - batchRemoveTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTerms(context.getOperationContext(), termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private void validateInputResources(List resources, QueryContext context) { + private void validateInputResources( + @Nonnull OperationContext opContext, List resources, QueryContext context) { for (ResourceRefInput resource : resources) { - validateInputResource(resource, context); + validateInputResource(opContext, resource, context); } } - private void validateInputResource(ResourceRefInput resource, QueryContext context) { + private void validateInputResource( + @Nonnull OperationContext opContext, ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + opContext, + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchRemoveTerms(List termUrns, List resources, QueryContext context) { + private void batchRemoveTerms( + @Nonnull OperationContext opContext, + List termUrns, + List resources, + QueryContext context) { log.debug("Batch removing Terms. terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.removeTermsFromResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTermsFromResources( + opContext, termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java index 9b6167c673d8db..abbeed29545e4a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + 
import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchSetDomainInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -11,16 +14,15 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchSetDomainResolver implements DataFetcher> { @@ -30,30 +32,37 @@ public class BatchSetDomainResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDomainInput input = bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); + final BatchSetDomainInput input = + bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); final String maybeDomainUrn = input.getDomainUrn(); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the domain - validateDomain(maybeDomainUrn); - validateInputResources(resources, context); + // First, validate the domain + validateDomain(context.getOperationContext(), maybeDomainUrn); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchSetDomains(maybeDomainUrn, resources, context); 
- return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchSetDomains(maybeDomainUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private void validateDomain(@Nullable String maybeDomainUrn) { + private void validateDomain( + @Nonnull OperationContext opContext, @Nullable String maybeDomainUrn) { if (maybeDomainUrn != null) { - DomainUtils.validateDomain(UrnUtils.getUrn(maybeDomainUrn), _entityService); + DomainUtils.validateDomain(opContext, UrnUtils.getUrn(maybeDomainUrn), _entityService); } } @@ -66,23 +75,36 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + context.getOperationContext(), + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchSetDomains(String maybeDomainUrn, List resources, QueryContext context) { + private void batchSetDomains( + String maybeDomainUrn, List resources, QueryContext context) { log.debug("Batch adding Domains. domainUrn: {}, resources: {}", maybeDomainUrn, resources); try { - DomainUtils.setDomainForResources(maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), + DomainUtils.setDomainForResources( + context.getOperationContext(), + maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - maybeDomainUrn, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + maybeDomainUrn, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java index 5961dc9087a638..546694ede92697 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java @@ -1,8 +1,11 @@ package 
com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchUpdateDeprecationInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; @@ -18,9 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateDeprecationResolver implements DataFetcher> { @@ -30,23 +30,34 @@ public class BatchUpdateDeprecationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateDeprecationInput input = bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); + final BatchUpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the resources - validateInputResources(resources, context); + // First, validate the resources + validateInputResources(resources, context); - try { - // Then execute the bulk update - batchUpdateDeprecation(input.getDeprecated(), input.getNote(), input.getDecommissionTime(), resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then 
execute the bulk update + batchUpdateDeprecation( + input.getDeprecated(), + input.getNote(), + input.getDecommissionTime(), + resources, + context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } private void validateInputResources(List resources, QueryContext context) { @@ -58,19 +69,32 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DeprecationUtils.isAuthorizedToUpdateDeprecationForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + context.getOperationContext(), + resourceUrn, + resource.getSubResource(), + resource.getSubResourceType(), + _entityService); } - private void batchUpdateDeprecation(boolean deprecated, + private void batchUpdateDeprecation( + boolean deprecated, @Nullable String note, @Nullable Long decommissionTime, List resources, QueryContext context) { - log.debug("Batch updating deprecation. deprecated: {}, note: {}, decommissionTime: {}, resources: {}", deprecated, note, decommissionTime, resources); + log.debug( + "Batch updating deprecation. 
deprecated: {}, note: {}, decommissionTime: {}, resources: {}", + deprecated, + note, + decommissionTime, + resources); try { DeprecationUtils.updateDeprecationForResources( + context.getOperationContext(), deprecated, note, decommissionTime, @@ -78,10 +102,14 @@ private void batchUpdateDeprecation(boolean deprecated, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch update deprecated to %s for resources with urns %s!", - deprecated, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch update deprecated to %s for resources with urns %s!", + deprecated, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 69b2b92fb9ccaf..9f24af66a70fa3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchUpdateSoftDeletedInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.DeleteUtils; @@ 
-14,36 +17,43 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateSoftDeletedInput input = bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); + final BatchUpdateSoftDeletedInput input = + bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); final List urns = input.getUrns(); final boolean deleted = input.getDeleted(); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { - // First, validate the entities exist - validateInputUrns(urns, context); + // First, validate the entities exist + validateInputUrns(urns, context); - try { - // Then execute the bulk soft delete - batchUpdateSoftDeleted(deleted, urns, context); - return true; - } catch (Exception e) { - log.error("Failed to perform batch soft delete against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform batch soft delete against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk soft delete + batchUpdateSoftDeleted(deleted, urns, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform batch soft delete against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform batch soft delete against input %s", input.toString()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } private void validateInputUrns(List urnStrs, QueryContext context) { @@ -55,10 +65,12 
@@ private void validateInputUrns(List urnStrs, QueryContext context) { private void validateInputUrn(String urnStr, QueryContext context) { final Urn urn = UrnUtils.getUrn(urnStr); if (!DeleteUtils.isAuthorizedToDeleteEntity(context, urn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - if (!_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); + if (!_entityService.exists(context.getOperationContext(), urn, true)) { + throw new IllegalArgumentException( + String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); } } @@ -66,14 +78,16 @@ private void batchUpdateSoftDeleted(boolean removed, List urnStrs, Query log.debug("Batch soft deleting assets. urns: {}", urnStrs); try { DeleteUtils.updateStatusForResources( + context.getOperationContext(), removed, urnStrs, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to batch update soft deleted status entities with urns %s!", urnStrs), + String.format( + "Failed to batch update soft deleted status entities with urns %s!", urnStrs), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index 59d5d6939c04c8..6e2fc77e703af3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -1,13 +1,15 @@ package 
com.linkedin.datahub.graphql.resolvers.mutate; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.linkedin.businessattribute.BusinessAttributeInfo; import com.linkedin.common.urn.Urn; import com.linkedin.container.EditableContainerProperties; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.DomainProperties; @@ -27,215 +29,311 @@ import com.linkedin.schema.EditableSchemaFieldInfo; import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.tag.TagProperties; +import io.datahubproject.metadata.context.OperationContext; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DescriptionUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DescriptionUtils() { } + private DescriptionUtils() {} public static void updateFieldDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService - ) { - EditableSchemaMetadata editableSchemaMetadata = - (EditableSchemaMetadata) 
EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); - - editableFieldInfo.setDescription(newDescription); - - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + EntityService entityService) { + EditableSchemaMetadata editableSchemaMetadata = + (EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); + + editableFieldInfo.setDescription(newDescription); + + persistAspect( + opContext, + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } public static void updateContainerDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService - ) { + EntityService entityService) { EditableContainerProperties containerProperties = - (EditableContainerProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableContainerProperties()); + (EditableContainerProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableContainerProperties()); containerProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, containerProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + 
containerProperties, + actor, + entityService); } public static void updateDomainDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService - ) { + EntityService entityService) { DomainProperties domainProperties = - (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, entityService, null); + (DomainProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + entityService, + null); if (domainProperties == null) { - // If there are no properties for the domain already, then we should throw since the properties model also requires a name. + // If there are no properties for the domain already, then we should throw since the + // properties model also requires a name. throw new IllegalArgumentException("Properties for this Domain do not yet exist!"); } domainProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + entityService); } public static void updateTagDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService - ) { + EntityService entityService) { TagProperties tagProperties = - (TagProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); + (TagProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.TAG_PROPERTIES_ASPECT_NAME, + entityService, + null); if (tagProperties == null) { - // If there are no properties for the tag already, then we should throw since the properties model also requires a name. 
+ // If there are no properties for the tag already, then we should throw since the properties + // model also requires a name. throw new IllegalArgumentException("Properties for this Tag do not yet exist!"); } tagProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.TAG_PROPERTIES_ASPECT_NAME, + tagProperties, + actor, + entityService); } public static void updateCorpGroupDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService - ) { + EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = - (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, entityService, new CorpGroupEditableInfo()); + (CorpGroupEditableInfo) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + entityService, + new CorpGroupEditableInfo()); if (corpGroupEditableInfo != null) { corpGroupEditableInfo.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, corpGroupEditableInfo, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + corpGroupEditableInfo, + actor, + entityService); } public static void updateGlossaryTermDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService - ) { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, entityService, null); + EntityService entityService) { + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + opContext, + 
resourceUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + entityService, + null); if (glossaryTermInfo == null) { - // If there are no properties for the term already, then we should throw since the properties model also requires a name. + // If there are no properties for the term already, then we should throw since the properties + // model also requires a name. throw new IllegalArgumentException("Properties for this Glossary Term do not yet exist!"); } - glossaryTermInfo.setDefinition(newDescription); // We call description 'definition' for glossary terms. Not great, we know. :( - persistAspect(resourceUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, entityService); + glossaryTermInfo.setDefinition( + newDescription); // We call description 'definition' for glossary terms. Not great, we know. + // :( + persistAspect( + opContext, + resourceUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + entityService); } public static void updateGlossaryNodeDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService - ) { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, entityService, null); + EntityService entityService) { + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setDefinition(newDescription); - persistAspect(resourceUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + entityService); } public static void 
updateNotebookDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - EditableNotebookProperties notebookProperties = (EditableNotebookProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, entityService, null); + EntityService entityService) { + EditableNotebookProperties notebookProperties = + (EditableNotebookProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + entityService, + null); if (notebookProperties != null) { notebookProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, notebookProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + notebookProperties, + actor, + entityService); } public static Boolean validateFieldDescriptionInput( + @Nonnull OperationContext opContext, Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + EntityService entityService) { + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } - validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); + validateSubresourceExists(opContext, resourceUrn, subResource, subResourceType, entityService); return true; } public static Boolean validateDomainInput( - Urn resourceUrn, - EntityService entityService - ) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. 
%s does not exist.", resourceUrn, resourceUrn)); + @Nonnull OperationContext opContext, Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } public static Boolean validateContainerInput( - Urn resourceUrn, - EntityService entityService - ) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + @Nonnull OperationContext opContext, Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } public static Boolean validateLabelInput( - Urn resourceUrn, - EntityService entityService - ) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + @Nonnull OperationContext opContext, Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } public static Boolean validateCorpGroupInput( - Urn corpUserUrn, - EntityService entityService - ) { - if (!entityService.exists(corpUserUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); + @Nonnull OperationContext opContext, Urn corpUserUrn, EntityService entityService) { + if (!entityService.exists(opContext, corpUserUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. 
%s does not exist.", corpUserUrn, corpUserUrn)); } return true; } public static Boolean validateNotebookInput( - Urn notebookUrn, - EntityService entityService) { - if (!entityService.exists(notebookUrn)) { + @Nonnull OperationContext opContext, Urn notebookUrn, EntityService entityService) { + if (!entityService.exists(opContext, notebookUrn, true)) { throw new IllegalArgumentException( String.format("Failed to update %s. %s does not exist.", notebookUrn, notebookUrn)); } return true; } - public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateFieldDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -245,11 +343,14 @@ public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + 
new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -259,25 +360,31 @@ public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContex orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateContainerDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - targetUrn.getEntityType(), - targetUrn.toString(), - orPrivilegeGroups); - } + public static boolean isAuthorizedToUpdateContainerDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); + } - public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + 
ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -288,79 +395,184 @@ public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext cont } public static void updateMlModelDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelProperties()); + EntityService entityService) { + EditableMLModelProperties editableProperties = + (EditableMLModelProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlModelGroupDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelGroupProperties()); + EntityService entityService) { + EditableMLModelGroupProperties editableProperties = + (EditableMLModelGroupProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + 
entityService, + new EditableMLModelGroupProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } + public static void updateMlFeatureDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureProperties()); + EntityService entityService) { + EditableMLFeatureProperties editableProperties = + (EditableMLFeatureProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlFeatureTableDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureTableProperties()); + EntityService entityService) { + 
EditableMLFeatureTableProperties editableProperties = + (EditableMLFeatureTableProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureTableProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlPrimaryKeyDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLPrimaryKeyProperties()); + EntityService entityService) { + EditableMLPrimaryKeyProperties editableProperties = + (EditableMLPrimaryKeyProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLPrimaryKeyProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateDataProductDescription( + @Nonnull OperationContext opContext, String newDescription, Urn resourceUrn, Urn actor, - EntityService entityService) { - DataProductProperties properties = 
(DataProductProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, entityService, new DataProductProperties()); + EntityService entityService) { + DataProductProperties properties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + entityService, + new DataProductProperties()); if (properties != null) { properties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + properties, + actor, + entityService); + } + + public static void updateBusinessAttributeDescription( + @Nonnull OperationContext opContext, + String newDescription, + Urn resourceUrn, + Urn actor, + EntityService entityService) { + BusinessAttributeInfo businessAttributeInfo = + (BusinessAttributeInfo) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + entityService, + new BusinessAttributeInfo()); + if (businessAttributeInfo != null) { + businessAttributeInfo.setDescription(newDescription); + } + persistAspect( + opContext, + resourceUrn, + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo, + actor, + entityService); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e5e3a5a0ee42e3..70f7913f7b08cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -6,6 +6,7 @@ import com.linkedin.data.template.SetMode; 
import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -19,71 +20,95 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final MoveDomainInput input = ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); + final MoveDomainInput input = + ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(input.getResourceUrn()); - final Urn newParentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Urn newParentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canManageDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!AuthorizationUtils.canManageDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - try { - if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Resource is not a domain."); - } + try { + if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Resource is not a domain."); + } - DomainProperties properties = (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, - null - ); + DomainProperties properties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); - if (properties == null) { - throw new IllegalArgumentException("Domain properties do not exist."); - } + if (properties == null) { + throw new IllegalArgumentException("Domain properties do not exist."); + } - if (newParentDomainUrn != null) { - if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Parent entity is not a domain."); - } - if (!_entityService.exists(newParentDomainUrn)) { - throw new IllegalArgumentException("Parent entity does not exist."); - } - } + if (newParentDomainUrn != null) { + if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Parent entity is not a domain."); + } + if (!_entityService.exists(context.getOperationContext(), newParentDomainUrn, true)) { + throw new IllegalArgumentException("Parent entity does not exist."); + } + } - if (DomainUtils.hasNameConflict(properties.getName(), newParentDomainUrn, context, _entityClient)) { - 
throw new DataHubGraphQLException( - String.format("\"%s\" already exists in the destination domain. Please pick a unique name.", properties.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } + if (DomainUtils.hasNameConflict( + properties.getName(), newParentDomainUrn, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in the destination domain. Please pick a unique name.", + properties.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } - properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - MutationUtils.persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, properties, actor, _entityService); - return true; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to move domain {} to parent {} : {}", input.getResourceUrn(), input.getParentDomain(), e.getMessage()); - throw new RuntimeException(String.format("Failed to move domain %s to %s", input.getResourceUrn(), input.getParentDomain()), e); - } - }); + properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + MutationUtils.persistAspect( + context.getOperationContext(), + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + properties, + actor, + _entityService); + return true; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to move domain {} to parent {} : {}", + input.getResourceUrn(), + input.getParentDomain(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to move domain %s to %s", + input.getResourceUrn(), input.getParentDomain()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java index 30bd940a7dfed4..d647374b8e1efc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java @@ -1,20 +1,19 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.codahale.metrics.Timer; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Generic GraphQL resolver responsible for performing updates against particular types. 
* @@ -23,7 +22,8 @@ */ public class MutableTypeBatchResolver implements DataFetcher>> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); private final BatchMutableType _batchMutableType; @@ -33,21 +33,25 @@ public MutableTypeBatchResolver(final BatchMutableType batchMutableType @Override public CompletableFuture> get(DataFetchingEnvironment environment) throws Exception { - final B[] input = bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); - - return CompletableFuture.supplyAsync(() -> { - Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); - - try { - return _batchMutableType.batchUpdate(input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error("Failed to perform batchUpdate", e); - throw new IllegalArgumentException(e); - } finally { - timer.stop(); - } - }); + final B[] input = + bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); + + try { + return _batchMutableType.batchUpdate(input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error("Failed to perform batchUpdate", e); + throw new IllegalArgumentException(e); + } finally { + timer.stop(); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java index 115a68e808de6c..23d16ed7d4d88b 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.MutableType; import graphql.schema.DataFetcher; @@ -8,8 +11,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** * Generic GraphQL resolver responsible for performing updates against particular types. * @@ -18,28 +19,36 @@ */ public class MutableTypeResolver implements DataFetcher> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeResolver.class.getName()); - private final MutableType _mutableType; + private final MutableType _mutableType; - public MutableTypeResolver(final MutableType mutableType) { - _mutableType = mutableType; - } + public MutableTypeResolver(final MutableType mutableType) { + _mutableType = mutableType; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final String urn = environment.getArgument("urn"); - final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug(String.format("Mutating entity. 
input: %s", input)); - return _mutableType.update(urn, input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error(String.format("Failed to perform update against input %s", input) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); - } + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final String urn = environment.getArgument("urn"); + final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _logger.debug(String.format("Mutating entity. input: %s", input)); + return _mutableType.update(urn, input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error( + String.format("Failed to perform update against input %s", input) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java index c862fcfa83594e..7608007e9dda98 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.SystemMetadataUtils.createDefaultSystemMetadata; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import 
com.linkedin.data.template.StringMap; @@ -16,59 +19,73 @@ import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaField; import com.linkedin.schema.SchemaMetadata; +import io.datahubproject.metadata.context.OperationContext; import java.util.Optional; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class MutationUtils { - private MutationUtils() { } - - public static void persistAspect(Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); - entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); + private MutationUtils() {} + + public static void persistAspect( + @Nonnull OperationContext opContext, + Urn urn, + String aspectName, + RecordTemplate aspect, + Urn actor, + EntityService entityService) { + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); + entityService.ingestProposal(opContext, proposal, EntityUtils.getAuditStamp(actor), false); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param urn * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithUrn(Urn urn, String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithUrn( + Urn urn, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); return setProposalProperties(proposal, urn.getEntityType(), aspectName, aspect); } /** - * Only intended for use from GraphQL mutations, 
executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param entityKey * @param entityType * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithKey(RecordTemplate entityKey, String entityType, - String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithKey( + RecordTemplate entityKey, String entityType, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(entityKey)); return setProposalProperties(proposal, entityType, aspectName, aspect); } - private static MetadataChangeProposal setProposalProperties(MetadataChangeProposal proposal, - String entityType, String aspectName, RecordTemplate aspect) { + private static MetadataChangeProposal setProposalProperties( + MetadataChangeProposal proposal, + String entityType, + String aspectName, + RecordTemplate aspect) { proposal.setEntityType(entityType); proposal.setAspectName(aspectName); proposal.setAspect(GenericRecordUtils.serializeAspect(aspect)); proposal.setChangeType(ChangeType.UPSERT); // Assumes proposal is generated first from the builder methods above so SystemMetadata is empty - SystemMetadata systemMetadata = new SystemMetadata(); + SystemMetadata systemMetadata = createDefaultSystemMetadata(); StringMap properties = new StringMap(); properties.put(APP_SOURCE, UI_SOURCE); systemMetadata.setProperties(properties); @@ -77,18 +94,16 @@ private static MetadataChangeProposal setProposalProperties(MetadataChangePropos } public static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if 
(!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); @@ -101,37 +116,42 @@ public static EditableSchemaFieldInfo getFieldInfoFromSchema( } public static Boolean validateSubresourceExists( + @Nonnull OperationContext opContext, Urn targetUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (subResourceType.equals(SubResourceType.DATASET_FIELD)) { - SchemaMetadata schemaMetadata = (SchemaMetadata) entityService.getAspect(targetUrn, - Constants.SCHEMA_METADATA_ASPECT_NAME, 0); + SchemaMetadata schemaMetadata = + (SchemaMetadata) + entityService.getAspect( + opContext, targetUrn, Constants.SCHEMA_METADATA_ASPECT_NAME, 0); if (schemaMetadata == null) { throw new IllegalArgumentException( - String.format("Failed to update %s & field %s. %s has no schema.", targetUrn, subResource, targetUrn) - ); + String.format( + "Failed to update %s & field %s. %s has no schema.", + targetUrn, subResource, targetUrn)); } Optional fieldMatch = - schemaMetadata.getFields().stream().filter(field -> field.getFieldPath().equals(subResource)).findFirst(); + schemaMetadata.getFields().stream() + .filter(field -> field.getFieldPath().equals(subResource)) + .findFirst(); if (!fieldMatch.isPresent()) { - throw new IllegalArgumentException(String.format( - "Failed to update %s & field %s. 
Field %s does not exist in the datasets schema.", - targetUrn, subResource, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", + targetUrn, subResource, subResource)); } return true; } - throw new IllegalArgumentException(String.format( - "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", - targetUrn, subResource, SubResourceType.values() - )); + throw new IllegalArgumentException( + String.format( + "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", + targetUrn, subResource, SubResourceType.values())); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java index 23c08043af5d3c..e047a24a0adaad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RemoveLinkInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.LinkUtils; @@ -13,9 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveLinkResolver implements DataFetcher> { @@ -24,36 +24,41 @@ 
public class RemoveLinkResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final RemoveLinkInput input = bindArgument(environment.getArgument("input"), RemoveLinkInput.class); + final QueryContext context = environment.getContext(); + final RemoveLinkInput input = + bindArgument(environment.getArgument("input"), RemoveLinkInput.class); String linkUrl = input.getLinkUrl(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LinkUtils.isAuthorizedToUpdateLinks(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { - log.debug("Removing Link input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.removeLink( - linkUrl, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove link from resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to remove link from resource with input %s", input.toString()), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput( + context.getOperationContext(), linkUrl, targetUrn, _entityService); + try { + log.debug("Removing Link input: {}", input); + + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LinkUtils.removeLink( + context.getOperationContext(), linkUrl, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to remove link from resource with 
input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to remove link from resource with input %s", input.toString()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java index 2d5faaab444589..8d148848855729 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java @@ -1,24 +1,22 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.RemoveOwnerInput; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveOwnerResolver implements DataFetcher> { @@ -27,36 +25,41 @@ public class RemoveOwnerResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final 
RemoveOwnerInput input = bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); + final QueryContext context = environment.getContext(); + final RemoveOwnerInput input = + bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); Urn ownerUrn = Urn.createFromString(input.getOwnerUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - Optional maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); - - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateRemoveInput( - targetUrn, - _entityService - ); - try { - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.removeOwnersFromResources( - ImmutableList.of(ownerUrn), - maybeOwnershipTypeUrn, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove owner from resource with input {}", input); - throw new RuntimeException(String.format("Failed to remove owner from resource with input %s", input.toString()), e); - } - }); + Urn ownershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? 
null + : Urn.createFromString(input.getOwnershipTypeUrn()); + + OwnerUtils.validateAuthorizedToUpdateOwners(context, targetUrn); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + OwnerUtils.validateRemoveInput(context.getOperationContext(), targetUrn, _entityService); + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + OwnerUtils.removeOwnersFromResources( + context.getOperationContext(), + ImmutableList.of(ownerUrn), + ownershipTypeUrn, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to remove owner from resource with input {}", input); + throw new RuntimeException( + String.format( + "Failed to remove owner from resource with input %s", input.toString()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java index 33a95c35760614..c1ff3514ff6c47 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.generated.TagAssociationInput; @@ -16,9 +19,6 @@ import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTagResolver implements DataFetcher> { @@ -26,44 +26,56 @@ public class RemoveTagResolver implements DataFetcher @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final QueryContext context = environment.getContext(); + final TagAssociationInput input = + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags(context, targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - true - ); - try { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + context.getOperationContext(), + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + true); + try { - if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { - log.error("Failed to remove %s. 
It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { + log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.debug("Removing Tag. input: %s", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTagsFromResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.debug("Removing Tag. input: %s", input); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LabelUtils.removeTagsFromResources( + context.getOperationContext(), + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java index 8f18b0ecd61989..e9f5489f23af0c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.generated.TermAssociationInput; @@ -16,9 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTermResolver implements DataFetcher> { @@ -26,45 +26,57 @@ public class RemoveTermResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final QueryContext context = environment.getContext(); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms(context, targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - true - ); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + context.getOperationContext(), + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + true); - try { + try { - if (!termUrn.getEntityType().equals("glossaryTerm")) { - log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); - return false; - } + if (!termUrn.getEntityType().equals("glossaryTerm")) { + log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); + return false; + } - log.info(String.format("Removing Term. input: {}", input)); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTermsFromResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info(String.format("Removing Term. 
input: {}", input)); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + LabelUtils.removeTermsFromResources( + context.getOperationContext(), + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java index d6e6e5610da56a..d571dae542d5a7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.DescriptionUpdateInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; @@ -12,19 +15,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashSet; import java.util.List; import java.util.Optional; import 
java.util.concurrent.CompletableFuture; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class UpdateDescriptionResolver implements DataFetcher> { @@ -33,7 +31,8 @@ public class UpdateDescriptionResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final DescriptionUpdateInput input = bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); + final DescriptionUpdateInput input = + bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating description. input: {}", input.toString()); switch (targetUrn.getEntityType()) { @@ -65,382 +64,525 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return updateMlPrimaryKeyDescription(targetUrn, input, environment.getContext()); case Constants.DATA_PRODUCT_ENTITY_NAME: return updateDataProductDescription(targetUrn, input, environment.getContext()); + case Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME: + return updateBusinessAttributeDescription(targetUrn, input, environment.getContext()); default: throw new RuntimeException( - String.format("Failed to update description. Unsupported resource type %s provided.", targetUrn)); + String.format( + "Failed to update description. Unsupported resource type %s provided.", targetUrn)); } } - private CompletableFuture updateContainerDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - DescriptionUtils.validateContainerInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateContainerDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateContainerDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + DescriptionUtils.validateContainerInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateContainerDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateContainerDescription"); } - private CompletableFuture updateDomainDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateDomainInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDomainDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateDomainDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateDomainInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDomainDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateDomainDescription"); } - // If updating schema field description fails, try again on a sibling until there are no more siblings to try. Then throw if necessary. + // If updating schema field description fails, try again on a sibling until there are no more + // siblings to try. Then throw if necessary. 
private Boolean attemptUpdateDatasetSchemaFieldDescription( @Nonnull final Urn targetUrn, @Nonnull final DescriptionUpdateInput input, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(targetUrn); try { - DescriptionUtils.validateFieldDescriptionInput(targetUrn, input.getSubResource(), input.getSubResourceType(), + DescriptionUtils.validateFieldDescriptionInput( + context.getOperationContext(), + targetUrn, + input.getSubResource(), + input.getSubResourceType(), _entityService); final Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateFieldDescription(input.getDescription(), targetUrn, input.getSubResource(), actor, + DescriptionUtils.updateFieldDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + input.getSubResource(), + actor, _entityService); return true; } catch (Exception e) { final Optional siblingUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, attemptedUrns); if (siblingUrn.isPresent()) { - log.warn("Failed to update description for input {}, trying sibling urn {} now.", input.toString(), siblingUrn.get()); - return attemptUpdateDatasetSchemaFieldDescription(siblingUrn.get(), input, context, attemptedUrns, siblingUrns); + log.warn( + "Failed to update description for input {}, trying sibling urn {} now.", + input.toString(), + siblingUrn.get()); + return attemptUpdateDatasetSchemaFieldDescription( + siblingUrn.get(), input, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", 
input.toString()), e); } } } - private CompletableFuture updateDatasetSchemaFieldDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - if (input.getSubResourceType() == null) { - throw new IllegalArgumentException("Update description without subresource is not currently supported"); - } - - List siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); - - return attemptUpdateDatasetSchemaFieldDescription(targetUrn, input, context, new HashSet<>(), siblingUrns); - }); + private CompletableFuture updateDatasetSchemaFieldDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + if (input.getSubResourceType() == null) { + throw new IllegalArgumentException( + "Update description without subresource is not currently supported"); + } + + List siblingUrns = + SiblingsUtils.getSiblingUrns( + context.getOperationContext(), targetUrn, _entityService); + + return attemptUpdateDatasetSchemaFieldDescription( + targetUrn, input, context, new HashSet<>(), siblingUrns); + }, + this.getClass().getSimpleName(), + "updateDatasetSchemaFieldDescription"); } - private CompletableFuture updateTagDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateTagDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateTagDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateTagDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateTagDescription"); } - private CompletableFuture updateGlossaryTermDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, 
_entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryTermDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateGlossaryTermDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryTermDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateGlossaryTermDescription"); } - private CompletableFuture updateGlossaryNodeDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryNodeDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateGlossaryNodeDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryNodeDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateGlossaryNodeDescription"); } - private CompletableFuture updateCorpGroupDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateCorpGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateCorpGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateCorpGroupInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateCorpGroupDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateCorpGroupDescription"); } - private CompletableFuture updateNotebookDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateNotebookInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateNotebookDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateNotebookDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateNotebookInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateNotebookDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateNotebookDescription"); } - private CompletableFuture updateMlModelDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlModelDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateMlModelDescription"); } - private CompletableFuture updateMlModelGroupDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlModelGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelGroupDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateMlModelGroupDescription"); } - private CompletableFuture updateMlFeatureDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlFeatureDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateMlFeatureDescription"); } - private CompletableFuture updateMlPrimaryKeyDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlPrimaryKeyDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlPrimaryKeyDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlPrimaryKeyDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateMlPrimaryKeyDescription"); } - private CompletableFuture updateMlFeatureTableDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureTableDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlFeatureTableDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureTableDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateMlFeatureTableDescription"); } - private CompletableFuture updateDataProductDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { + private CompletableFuture updateDataProductDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDataProductDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateDataProductDescription"); + } - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDataProductDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateBusinessAttributeDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + // check if user has the rights to update description for business attribute + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + // validate label input + DescriptionUtils.validateLabelInput( + context.getOperationContext(), targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateBusinessAttributeDescription( + context.getOperationContext(), + input.getDescription(), + targetUrn, + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }, + this.getClass().getSimpleName(), + "updateBusinessAttributeDescription"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 0e316ac1296ee0..ad6dbbe635ed1f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -1,218 +1,380 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + +import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import 
com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.UpdateNameInput; +import com.linkedin.datahub.graphql.resolvers.businessattribute.BusinessAttributeAuthorizationUtils; import com.linkedin.datahub.graphql.resolvers.dataproduct.DataProductAuthorizationUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.BusinessAttributeUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.dataproduct.DataProductProperties; +import com.linkedin.dataset.EditableDatasetProperties; import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { 
- final UpdateNameInput input = bindArgument(environment.getArgument("input"), UpdateNameInput.class); + final QueryContext context = environment.getContext(); + final UpdateNameInput input = + bindArgument(environment.getArgument("input"), UpdateNameInput.class); Urn targetUrn = Urn.createFromString(input.getUrn()); log.info("Updating name. input: {}", input); - return CompletableFuture.supplyAsync(() -> { - if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!_entityService.exists(context.getOperationContext(), targetUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + } - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermName(targetUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeName(targetUrn, input, environment.getContext()); - case Constants.DOMAIN_ENTITY_NAME: - return updateDomainName(targetUrn, input, environment.getContext()); - case Constants.CORP_GROUP_ENTITY_NAME: - return updateGroupName(targetUrn, input, environment.getContext()); - case Constants.DATA_PRODUCT_ENTITY_NAME: - return updateDataProductName(targetUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update name. 
Unsupported resource type %s provided.", targetUrn)); - } - }); + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermName(targetUrn, input, context); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeName(targetUrn, input, context); + case Constants.DOMAIN_ENTITY_NAME: + return updateDomainName(targetUrn, input, context); + case Constants.CORP_GROUP_ENTITY_NAME: + return updateGroupName(targetUrn, input, context); + case Constants.DATA_PRODUCT_ENTITY_NAME: + return updateDataProductName(targetUrn, input, context); + case Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME: + return updateBusinessAttributeName(targetUrn, input, environment.getContext()); + case Constants.DATASET_ENTITY_NAME: + return updateDatasetName(targetUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update name. Unsupported resource type %s provided.", targetUrn)); + } + }, + this.getClass().getSimpleName(), + "get"); } private Boolean updateGlossaryTermName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { throw new IllegalArgumentException("Glossary Term does not exist"); } glossaryTermInfo.setName(input.getName()); Urn actor = UrnUtils.getUrn(context.getActorUrn()); - 
persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateGlossaryNodeName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } 
catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - private Boolean updateDomainName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateDomainName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageDomains(context)) { try { - DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, null); + DomainProperties domainProperties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (domainProperties == null) { throw new IllegalArgumentException("Domain does not exist"); } - if (DomainUtils.hasNameConflict(input.getName(), DomainUtils.getParentDomainSafely(domainProperties), context, _entityClient)) { + if (DomainUtils.hasNameConflict( + input.getName(), + DomainUtils.getParentDomainSafely(domainProperties), + context, + _entityClient)) { throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); + String.format( + "\"%s\" already exists in this domain. 
Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); } domainProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, _entityService); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + _entityService); return true; } catch (DataHubGraphQLException e) { throw e; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - private Boolean updateGroupName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageUsersAndGroups(context)) { try { - CorpGroupInfo corpGroupInfo = (CorpGroupInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.CORP_GROUP_INFO_ASPECT_NAME, _entityService, null); + CorpGroupInfo corpGroupInfo = + (CorpGroupInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.CORP_GROUP_INFO_ASPECT_NAME, + _entityService, + null); if (corpGroupInfo == null) { throw new IllegalArgumentException("Group does not exist"); } corpGroupInfo.setDisplayName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); + persistAspect( + context.getOperationContext(), + 
targetUrn, + Constants.CORP_GROUP_INFO_ASPECT_NAME, + corpGroupInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + // updates editable dataset properties aspect's name field + private Boolean updateDatasetName(Urn targetUrn, UpdateNameInput input, QueryContext context) { + if (AuthorizationUtils.canEditProperties(targetUrn, context)) { + try { + if (input.getName() != null) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(); + editableDatasetProperties.setName(input.getName()); + final AuditStamp auditStamp = new AuditStamp(); + Urn actor = UrnUtils.getUrn(context.getActorUrn()); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + editableDatasetProperties.setLastModified(auditStamp); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + editableDatasetProperties, + actor, + _entityService); + } + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private Boolean updateDataProductName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { try { - DataProductProperties dataProductProperties = (DataProductProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, _entityService, null); + DataProductProperties dataProductProperties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (dataProductProperties == null) { throw new IllegalArgumentException("Data Product does not exist"); } - Domains dataProductDomains = (Domains) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); - if (dataProductDomains != null && dataProductDomains.hasDomains() && dataProductDomains.getDomains().size() > 0) { + Domains dataProductDomains = + (Domains) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.DOMAINS_ASPECT_NAME, + _entityService, + null); + if (dataProductDomains != null + && dataProductDomains.hasDomains() + && dataProductDomains.getDomains().size() > 0) { // get first domain since we only allow one domain right now Urn domainUrn = UrnUtils.getUrn(dataProductDomains.getDomains().get(0).toString()); - // if they can't edit a data product from either the parent domain permission or from permission on the data product itself, throw error + // if they can't edit a data product from either the parent domain permission or from + // permission on the data product itself, throw error if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn) && !DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new 
AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } else { // should not happen since data products need to have a domain if (!DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } dataProductProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties, actor, _entityService); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + dataProductProperties, + actor, + _entityService); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + } + + private Boolean updateBusinessAttributeName( + Urn targetUrn, UpdateNameInput input, QueryContext context) { + if (!BusinessAttributeAuthorizationUtils.canManageBusinessAttribute(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + try { + BusinessAttributeInfo businessAttributeInfo = + (BusinessAttributeInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + _entityService, + null); + if (businessAttributeInfo == null) { + throw new IllegalArgumentException("Business Attribute does not exist"); + } + + if (BusinessAttributeUtils.hasNameConflict(input.getName(), context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists as Business Attribute. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } + businessAttributeInfo.setFieldPath(input.getName()); + businessAttributeInfo.setName(input.getName()); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo, + actor, + _entityService); return true; + } catch (DataHubGraphQLException e) { + throw e; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 5d78bc38eafe87..7f714bfd33bd5a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -1,85 +1,106 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateParentNodeInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher> { - private final EntityService _entityService; + private final EntityService _entityService; private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final UpdateParentNodeInput input = bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); + final UpdateParentNodeInput input = + bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); final QueryContext context = environment.getContext(); Urn 
targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating parent node. input: {}", input.toString()); - if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + if (!_entityService.exists(context.getOperationContext(), targetUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryNode.", targetUrn, parentNodeUrn)); + if (!_entityService.exists(context.getOperationContext(), parentNodeUrn, true) + || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. 
%s either does not exist or is not a glossaryNode.", + targetUrn, parentNodeUrn)); } } GlossaryNodeUrn finalParentNodeUrn = parentNodeUrn; - return CompletableFuture.supplyAsync(() -> { - Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - // need to be able to manage current parent node and new parent node - if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) - && GlossaryUtils.canManageChildrenEntities(context, finalParentNodeUrn, _entityClient)) { - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update parentNode. Unsupported resource type %s provided.", targetUrn)); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + // need to be able to manage current parent node and new parent node + if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) + && GlossaryUtils.canManageChildrenEntities( + context, finalParentNodeUrn, _entityClient)) { + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update parentNode. 
Unsupported resource type %s provided.", + targetUrn)); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private Boolean updateGlossaryTermParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { - // If there is no info aspect for the term already, then we should throw since the model also requires a name. + // If there is no info aspect for the term already, then we should throw since the model + // also requires a name. 
throw new IllegalArgumentException("Info for this Glossary Term does not yet exist!"); } @@ -89,12 +110,19 @@ private Boolean updateGlossaryTermParentNode( glossaryTermInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + context.getOperationContext(), + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } @@ -102,11 +130,16 @@ private Boolean updateGlossaryNodeParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Info for this Glossary Node does not yet exist!"); } @@ -117,12 +150,19 @@ private Boolean updateGlossaryNodeParentNode( glossaryNodeInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + context.getOperationContext(), + targetUrn, + 
Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java index 875bc43e7c100f..8a598f8d8bbdda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java @@ -1,8 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput; import com.linkedin.datahub.graphql.generated.UserSetting; import com.linkedin.datahub.graphql.resolvers.settings.user.UpdateCorpUserViewsSettingsResolver; @@ -17,15 +22,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Deprecated! 
Use {@link UpdateCorpUserViewsSettingsResolver} - * instead. - */ +/** Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} instead. */ @Slf4j @RequiredArgsConstructor public class UpdateUserSettingResolver implements DataFetcher> { @@ -35,35 +32,49 @@ public class UpdateUserSettingResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateUserSettingInput input = bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); + final UpdateUserSettingInput input = + bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); UserSetting name = input.getName(); final boolean value = input.getValue(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - return CompletableFuture.supplyAsync(() -> { - try { - // In the future with more settings, we'll need to do a read-modify-write - // for now though, we can just write since there is only 1 setting - CorpUserSettings newSettings = new CorpUserSettings(); - newSettings.setAppearance(new CorpUserAppearanceSettings()); - if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { - newSettings.setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); - } else { - log.error("User Setting name {} not currently supported", name); - throw new RuntimeException(String.format("User Setting name %s not currently supported", name)); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // In the future with more settings, we'll need to do a read-modify-write + // for now though, we can just write since there is only 1 setting + CorpUserSettings newSettings = new CorpUserSettings(); + newSettings.setAppearance(new CorpUserAppearanceSettings()); + if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { + newSettings.setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); + } else { + log.error("User Setting name {} not 
currently supported", name); + throw new RuntimeException( + String.format("User Setting name %s not currently supported", name)); + } - MetadataChangeProposal proposal = - buildMetadataChangeProposalWithUrn(actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); - _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); + _entityService.ingestProposal( + context.getOperationContext(), proposal, EntityUtils.getAuditStamp(actor), false); - return true; - } catch (Exception e) { - log.error("Failed to perform user settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform user settings update against input %s", input.toString()), e); - } - }); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform user settings update against input %s", input.toString()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/BusinessAttributeUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/BusinessAttributeUtils.java new file mode 100644 index 00000000000000..25dc36f74ef73a --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/BusinessAttributeUtils.java @@ -0,0 +1,111 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.filter.Condition; +import 
com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.schema.ArrayType; +import com.linkedin.schema.BooleanType; +import com.linkedin.schema.BytesType; +import com.linkedin.schema.DateType; +import com.linkedin.schema.EnumType; +import com.linkedin.schema.FixedType; +import com.linkedin.schema.MapType; +import com.linkedin.schema.NumberType; +import com.linkedin.schema.SchemaFieldDataType; +import com.linkedin.schema.StringType; +import com.linkedin.schema.TimeType; +import java.util.Objects; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class BusinessAttributeUtils { + private static final Integer DEFAULT_START = 0; + private static final Integer DEFAULT_COUNT = 1000; + private static final String NAME_INDEX_FIELD_NAME = "name"; + + private BusinessAttributeUtils() {} + + public static boolean hasNameConflict( + String name, QueryContext context, EntityClient entityClient) { + Filter filter = buildNameFilter(name); + try { + final SearchResult gmsResult = + entityClient.filter( + context.getOperationContext(), + Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME, + filter, + null, + DEFAULT_START, + DEFAULT_COUNT); + return gmsResult.getNumEntities() > 0; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to fetch Business Attributes", e); + } + } + + private static Filter buildNameFilter(String name) { + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(buildNameCriterion(name)))); + } + + private static CriterionArray buildNameCriterion(@Nonnull final String name) { + return new CriterionArray( + new 
Criterion() + .setField(NAME_INDEX_FIELD_NAME) + .setValue(name) + .setCondition(Condition.EQUAL)); + } + + public static SchemaFieldDataType mapSchemaFieldDataType( + com.linkedin.datahub.graphql.generated.SchemaFieldDataType type) { + if (Objects.isNull(type)) { + return null; + } + SchemaFieldDataType schemaFieldDataType = new SchemaFieldDataType(); + switch (type) { + case BYTES: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new BytesType())); + return schemaFieldDataType; + case FIXED: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new FixedType())); + return schemaFieldDataType; + case ENUM: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new EnumType())); + return schemaFieldDataType; + case MAP: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new MapType())); + return schemaFieldDataType; + case TIME: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new TimeType())); + return schemaFieldDataType; + case BOOLEAN: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new BooleanType())); + return schemaFieldDataType; + case STRING: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new StringType())); + return schemaFieldDataType; + case NUMBER: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new NumberType())); + return schemaFieldDataType; + case DATE: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new DateType())); + return schemaFieldDataType; + case ARRAY: + schemaFieldDataType.setType(SchemaFieldDataType.Type.create(new ArrayType())); + return schemaFieldDataType; + default: + return null; + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 7d4c5bee61e19b..73e7f9ec1cca7c 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,74 +1,58 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import com.datahub.authorization.AuthUtil; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DeleteUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - private DeleteUtils() { } + private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())) - )); - - 
return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups); + return AuthUtil.isAuthorizedEntityUrns( + context.getAuthorizer(), context.getActorUrn(), DELETE, List.of(entityUrn)); } public static void updateStatusForResources( + @Nonnull OperationContext opContext, boolean removed, List urnStrs, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (String urnStr : urnStrs) { - changes.add(buildSoftDeleteProposal(removed, urnStr, actor, entityService)); + changes.add(buildSoftDeleteProposal(opContext, removed, urnStr, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } private static MetadataChangeProposal buildSoftDeleteProposal( + @Nonnull OperationContext opContext, boolean removed, String urnStr, Urn actor, - EntityService entityService - ) { - Status status = (Status) EntityUtils.getAspectFromEntity( - urnStr, - Constants.STATUS_ASPECT_NAME, - entityService, - new Status()); + EntityService entityService) { + Status status = + (Status) + EntityUtils.getAspectFromEntity( + opContext, urnStr, Constants.STATUS_ASPECT_NAME, entityService, new Status()); status.setRemoved(removed); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java index bd82bbb8e514f3..3912ffa6226bff 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; @@ -16,28 +17,29 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; - - @Slf4j public class DeprecationUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeprecationUtils() { } 
+ private DeprecationUtils() {} - public static boolean isAuthorizedToUpdateDeprecationForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDeprecationForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -48,41 +50,35 @@ public static boolean isAuthorizedToUpdateDeprecationForEntity(@Nonnull QueryCon } public static void updateDeprecationForResources( + @Nonnull OperationContext opContext, boolean deprecated, @Nullable String note, @Nullable Long decommissionTime, List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildUpdateDeprecationProposal(deprecated, note, decommissionTime, resource, actor, entityService)); + changes.add( + buildUpdateDeprecationProposal( + opContext, deprecated, note, decommissionTime, resource, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } private static MetadataChangeProposal buildUpdateDeprecationProposal( + @Nonnull OperationContext opContext, boolean deprecated, @Nullable String note, @Nullable Long decommissionTime, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { String resourceUrn = 
resource.getResourceUrn(); - Deprecation deprecation = getDeprecation( - entityService, - resourceUrn, - actor, - note, - deprecated, - decommissionTime - ); + Deprecation deprecation = + getDeprecation( + opContext, entityService, resourceUrn, actor, note, deprecated, decommissionTime); return MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn(resourceUrn), - Constants.DEPRECATION_ASPECT_NAME, - deprecation - ); + UrnUtils.getUrn(resourceUrn), Constants.DEPRECATION_ASPECT_NAME, deprecation); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 585fbdf53a2ba4..1114cf7344e8f4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -1,15 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import 
com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; @@ -30,8 +32,8 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; - import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -40,13 +42,8 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - // TODO: Move to consuming from DomainService. @Slf4j public class DomainUtils { @@ -54,17 +51,20 @@ public class DomainUtils { private static final String HAS_PARENT_DOMAIN_INDEX_FIELD_NAME = "hasParentDomain"; private static final String NAME_INDEX_FIELD_NAME = "name"; - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DomainUtils() { } + private DomainUtils() {} - public static boolean isAuthorizedToUpdateDomainsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new 
ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -75,40 +75,47 @@ public static boolean isAuthorizedToUpdateDomainsForEntity(@Nonnull QueryContext } public static void setDomainForResources( + @Nonnull OperationContext opContext, @Nullable Urn domainUrn, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildSetDomainProposal(domainUrn, resource, actor, entityService)); + changes.add(buildSetDomainProposal(opContext, domainUrn, resource, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } private static MetadataChangeProposal buildSetDomainProposal( + @Nonnull OperationContext opContext, @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.DOMAINS_ASPECT_NAME, - entityService, - new Domains()); + EntityService entityService) { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.DOMAINS_ASPECT_NAME, + entityService, + new Domains()); final UrnArray newDomains = new UrnArray(); if (domainUrn != null) { newDomains.add(domainUrn); } domains.setDomains(newDomains); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } - public static void validateDomain(Urn domainUrn, EntityService entityService) { - if 
(!entityService.exists(domainUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); + public static void validateDomain( + @Nonnull OperationContext opContext, Urn domainUrn, EntityService entityService) { + if (!entityService.exists(opContext, domainUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); } } @@ -119,14 +126,12 @@ private static List buildRootDomainCriteria() { new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("false") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("") - .setCondition(Condition.IS_NULL) - ); + .setCondition(Condition.IS_NULL)); return criteria; } @@ -138,14 +143,12 @@ private static List buildParentDomainCriteria(@Nonnull final Urn pare new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("true") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue(parentDomainUrn.toString()) - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); return criteria; } @@ -158,36 +161,38 @@ private static Criterion buildNameCriterion(@Nonnull final String name) { } /** - * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain criterion. - * The reason for the OR on root is elastic can have a null|false value to represent an root domain in the index. + * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain + * criterion. The reason for the OR on root is elastic can have a null|false value to represent an + * root domain in the index. 
+ * * @param name an optional name to AND in to each condition of the filter * @param parentDomainUrn the parent domain (null means root). * @return the Filter */ - public static Filter buildNameAndParentDomainFilter(@Nullable final String name, @Nullable final Urn parentDomainUrn) { + public static Filter buildNameAndParentDomainFilter( + @Nullable final String name, @Nullable final Urn parentDomainUrn) { if (parentDomainUrn == null) { - return new Filter().setOr( - new ConjunctiveCriterionArray( - buildRootDomainCriteria().stream().map(parentCriterion -> { - final CriterionArray array = new CriterionArray(parentCriterion); - if (name != null) { - array.add(buildNameCriterion(name)); - } - return new ConjunctiveCriterion().setAnd(array); - }).collect(Collectors.toList()) - ) - ); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + buildRootDomainCriteria().stream() + .map( + parentCriterion -> { + final CriterionArray array = new CriterionArray(parentCriterion); + if (name != null) { + array.add(buildNameCriterion(name)); + } + return new ConjunctiveCriterion().setAnd(array); + }) + .collect(Collectors.toList()))); } final CriterionArray andArray = new CriterionArray(buildParentDomainCriteria(parentDomainUrn)); if (name != null) { andArray.add(buildNameCriterion(name)); } - return new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(andArray) - ) - ); + return new Filter() + .setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(andArray))); } public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn) { @@ -196,6 +201,7 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn /** * Check if a domain has any child domains + * * @param domainUrn the URN of the domain to check * @param context query context (includes authorization context to authorize the request) * @param entityClient client used to perform the check @@ -204,18 +210,14 @@ public 
static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn public static boolean hasChildDomains( @Nonnull final Urn domainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) throws RemoteInvocationException { + @Nonnull final EntityClient entityClient) + throws RemoteInvocationException { Filter parentDomainFilter = buildParentDomainFilter(domainUrn); // Search for entities matching parent domain // Limit count to 1 for existence check - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - parentDomainFilter, - null, - 0, - 1, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + context.getOperationContext(), DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1); return (searchResult.getNumEntities() > 0); } @@ -223,29 +225,24 @@ private static Map getDomainsByNameAndParent( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { final Filter filter = buildNameAndParentDomainFilter(name, parentDomainUrn); - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + context.getOperationContext(), DOMAIN_ENTITY_NAME, filter, null, 0, 1000); - final Set domainUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Set domainUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); return entityClient.batchGetV2( + context.getOperationContext(), DOMAIN_ENTITY_NAME, domainUrns, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - context.getAuthentication()); + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)); } catch (Exception e) { throw new 
RuntimeException("Failed fetching Domains by name and parent", e); } @@ -255,51 +252,63 @@ public static boolean hasNameConflict( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { - final Map entities = getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); + @Nonnull final EntityClient entityClient) { + final Map entities = + getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); // Even though we searched by name, do one more pass to check the name is unique - return entities.values().stream().anyMatch(entityResponse -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data(); - DomainProperties domainProperties = new DomainProperties(dataMap); - return (domainProperties.hasName() && domainProperties.getName().equals(name)); - } - return false; - }); + return entities.values().stream() + .anyMatch( + entityResponse -> { + if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); + DomainProperties domainProperties = new DomainProperties(dataMap); + return (domainProperties.hasName() && domainProperties.getName().equals(name)); + } + return false; + }); } @Nullable public static Entity getParentDomain( @Nonnull final Urn urn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { - final EntityResponse entityResponse = entityClient.getV2( - DOMAIN_ENTITY_NAME, - urn, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - - if (entityResponse != null && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties 
properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); + final EntityResponse entityResponse = + entityClient.getV2( + context.getOperationContext(), + DOMAIN_ENTITY_NAME, + urn, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)); + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new DomainProperties( + entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); - return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; + return parentDomainUrn != null ? UrnToEntityMapper.map(context, parentDomainUrn) : null; } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve parent domain for entity %s", urn), e); + throw new RuntimeException( + String.format("Failed to retrieve parent domain for entity %s", urn), e); } return null; } /** - * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where moving a domain - * to the root leaves the parentDomain field set but makes hasParentDomain false. This helper makes sure that queries - * to elastic where hasParentDomain=false and parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where + * moving a domain to the root leaves the parentDomain field set but makes hasParentDomain false. + * This helper makes sure that queries to elastic where hasParentDomain=false and + * parentDomain=value only gives us the parentDomain if hasParentDomain=true. 
+ * * @param properties the domain properties aspect * @return the parentDomain or null */ @@ -307,4 +316,4 @@ public static Entity getParentDomain( public static Urn getParentDomainSafely(@Nonnull final DomainProperties properties) { return properties.hasParentDomain() ? properties.getParentDomain() : null; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java index 8aa4a8d756bea6..15c93904fc3bdd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java @@ -3,7 +3,6 @@ import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -11,20 +10,22 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EmbedUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private EmbedUtils() { } + private EmbedUtils() {} - public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new 
ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateEmbedForEntity( + @Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -33,4 +34,4 @@ public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn enti entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java new file mode 100644 index 00000000000000..17718f39c12387 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/FormUtils.java @@ -0,0 +1,216 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.generated.CreateDynamicFormAssignmentInput; +import com.linkedin.datahub.graphql.generated.CreateFormInput; +import com.linkedin.datahub.graphql.generated.CreatePromptInput; +import com.linkedin.datahub.graphql.generated.FormActorAssignmentInput; +import com.linkedin.datahub.graphql.generated.StructuredPropertyParamsInput; +import com.linkedin.datahub.graphql.generated.SubmitFormPromptInput; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.form.DynamicFormAssignment; +import com.linkedin.form.FormActorAssignment; +import com.linkedin.form.FormInfo; +import com.linkedin.form.FormPrompt; +import 
com.linkedin.form.FormPromptArray; +import com.linkedin.form.FormPromptType; +import com.linkedin.form.FormType; +import com.linkedin.form.StructuredPropertyParams; +import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.structured.PrimitivePropertyValueArray; +import java.util.List; +import java.util.Objects; +import java.util.UUID; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class FormUtils { + + private static final String COMPLETED_FORMS = "completedForms"; + private static final String INCOMPLETE_FORMS = "incompleteForms"; + private static final String VERIFIED_FORMS = "verifiedForms"; + private static final String OWNERS = "owners"; + private static final String COMPLETED_FORMS_COMPLETED_PROMPT_IDS = + "completedFormsCompletedPromptIds"; + private static final String INCOMPLETE_FORMS_COMPLETED_PROMPT_IDS = + "incompleteFormsCompletedPromptIds"; + + private FormUtils() {} + + public static PrimitivePropertyValueArray getStructuredPropertyValuesFromInput( + @Nonnull final SubmitFormPromptInput input) { + final PrimitivePropertyValueArray values = new PrimitivePropertyValueArray(); + + input + .getStructuredPropertyParams() + .getValues() + .forEach(value -> values.add(StructuredPropertyUtils.mapPropertyValueInput(value))); + + return values; + } + + /** Map a GraphQL CreateDynamicFormAssignmentInput to the GMS DynamicFormAssignment aspect */ + @Nonnull + public static DynamicFormAssignment mapDynamicFormAssignment( + @Nonnull final CreateDynamicFormAssignmentInput input, + @Nullable AspectRetriever aspectRetriever) { + 
Objects.requireNonNull(input, "input must not be null"); + + final DynamicFormAssignment result = new DynamicFormAssignment(); + final Filter filter = + new Filter() + .setOr( + ResolverUtils.buildConjunctiveCriterionArrayWithOr( + input.getOrFilters(), aspectRetriever)); + result.setFilter(filter); + return result; + } + + /** + * Creates a Filter where the provided formUrn is either in completedForms or incompleteForms for + * an entity + */ + private static Filter generateCompleteOrIncompleteFilter(@Nonnull final String formUrn) + throws Exception { + final CriterionArray completedFormsAndArray = new CriterionArray(); + final CriterionArray incompleteFormsAndArray = new CriterionArray(); + completedFormsAndArray.add(buildFormCriterion(formUrn, COMPLETED_FORMS)); + incompleteFormsAndArray.add(buildFormCriterion(formUrn, INCOMPLETE_FORMS)); + // need this to be an OR not two ANDs + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(completedFormsAndArray), + new ConjunctiveCriterion().setAnd(incompleteFormsAndArray))); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field) { + return buildFormCriterion(formUrn, field, false); + } + + private static Criterion buildFormCriterion( + @Nonnull final String formUrn, @Nonnull final String field, final boolean negated) { + return new Criterion() + .setField(field) + .setValue(formUrn) + .setCondition(Condition.EQUAL) + .setNegated(negated); + } + + private static boolean isActorExplicitlyAssigned( + @Nonnull final Urn actorUrn, @Nonnull final FormInfo formInfo) { + return (formInfo.getActors().getUsers() != null + && formInfo.getActors().getUsers().stream().anyMatch(user -> user.equals(actorUrn))) + || (formInfo.getActors().getGroups() != null + && formInfo.getActors().getGroups().stream().anyMatch(group -> group.equals(actorUrn))); + } + + @Nonnull + public static FormInfo mapFormInfo(@Nonnull final CreateFormInput 
input) { + Objects.requireNonNull(input, "input must not be null"); + + final FormInfo result = new FormInfo(); + result.setName(input.getName()); + if (input.getDescription() != null) { + result.setDescription(input.getDescription()); + } + if (input.getType() != null) { + result.setType(FormType.valueOf(input.getType().toString())); + } + if (input.getPrompts() != null) { + result.setPrompts(mapPrompts(input.getPrompts())); + } + if (input.getActors() != null) { + result.setActors(mapFormActorAssignment(input.getActors())); + } + + return result; + } + + @Nonnull + public static FormPromptArray mapPrompts(@Nonnull final List promptInputs) { + Objects.requireNonNull(promptInputs, "promptInputs must not be null"); + + final FormPromptArray result = new FormPromptArray(); + promptInputs.forEach( + promptInput -> { + result.add(mapPrompt(promptInput)); + }); + return result; + } + + @Nonnull + public static FormPrompt mapPrompt(@Nonnull final CreatePromptInput promptInput) { + Objects.requireNonNull(promptInput, "promptInput must not be null"); + + final FormPrompt result = new FormPrompt(); + String promptId = + promptInput.getId() != null ? 
 promptInput.getId() : UUID.randomUUID().toString(); + result.setId(promptId); + result.setTitle(promptInput.getTitle()); + if (promptInput.getDescription() != null) { + result.setDescription(promptInput.getDescription()); + } + if (promptInput.getType() != null) { + result.setType(FormPromptType.valueOf(promptInput.getType().toString())); + } + if (promptInput.getStructuredPropertyParams() != null) { + result.setStructuredPropertyParams( + mapStructuredPropertyParams(promptInput.getStructuredPropertyParams())); + } + if (promptInput.getRequired() != null) { + result.setRequired(promptInput.getRequired()); + } + + return result; + } + + @Nonnull + public static StructuredPropertyParams mapStructuredPropertyParams( + @Nonnull final StructuredPropertyParamsInput paramsInput) { + Objects.requireNonNull(paramsInput, "paramsInput must not be null"); + + final StructuredPropertyParams result = new StructuredPropertyParams(); + result.setUrn(UrnUtils.getUrn(paramsInput.getUrn())); + return result; + } + + @Nonnull + public static FormActorAssignment mapFormActorAssignment( + @Nonnull final FormActorAssignmentInput input) { + Objects.requireNonNull(input, "input must not be null"); + + final FormActorAssignment result = new FormActorAssignment(); + if (input.getOwners() != null) { + result.setOwners(input.getOwners()); + } + if (input.getUsers() != null) { + UrnArray userUrns = new UrnArray(); + input.getUsers().forEach(user -> userUrns.add(UrnUtils.getUrn(user))); + result.setUsers(userUrns); + } + if (input.getGroups() != null) { + UrnArray groupUrns = new UrnArray(); + input.getGroups().forEach(group -> groupUrns.add(UrnUtils.getUrn(group))); + result.setGroups(groupUrns); + } + + return result; + } + + @Nonnull + public static List mapPromptsToAdd( + @Nonnull final List promptsToAdd) { + return promptsToAdd.stream().map(FormUtils::mapPrompt).collect(Collectors.toList()); + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 655e5333cb34e1..16df9911f3bec3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,12 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.AuthUtil; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; @@ -15,32 +16,35 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; -import lombok.extern.slf4j.Slf4j; - +import java.net.URISyntaxException; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.net.URISyntaxException; -import java.util.Optional; +import lombok.extern.slf4j.Slf4j; @Slf4j public class GlossaryUtils { - private GlossaryUtils() { } + private GlossaryUtils() {} /** - * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the user has global control - * of their Business Glossary to create, edit, move, and delete Terms and Nodes. 
+ * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the + * user has global control of their Business Glossary to create, edit, move, and delete Terms and + * Nodes. */ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** - * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes under a parent Node. - * They can do this with either the global MANAGE_GLOSSARIES privilege, or if they have the MANAGE_GLOSSARY_CHILDREN privilege - * on the relevant parent node in the Glossary. + * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes + * under a parent Node. They can do this with either the global MANAGE_GLOSSARIES privilege, or if + * they have the MANAGE_GLOSSARY_CHILDREN privilege on the relevant parent node in the Glossary. 
*/ - public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, @Nonnull EntityClient entityClient) { + public static boolean canManageChildrenEntities( + @Nonnull QueryContext context, + @Nullable Urn parentNodeUrn, + @Nonnull EntityClient entityClient) { if (canManageGlossaries(context)) { return true; } @@ -48,28 +52,31 @@ public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @ return false; // if no parent node, we must rely on the canManageGlossaries method above } - //Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege - if (hasManagePrivilege(context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { + // Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege + if (hasManagePrivilege( + context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } - //Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no parent associated. + // Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no + // parent associated. 
Urn currentParentNodeUrn = parentNodeUrn; while (currentParentNodeUrn != null) { - if (hasManagePrivilege(context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { + if (hasManagePrivilege( + context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } currentParentNodeUrn = getParentUrn(currentParentNodeUrn, context, entityClient); } return false; - } - public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())) - )); + public static boolean hasManagePrivilege( + @Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -83,13 +90,24 @@ public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullabl * Returns the urn of the parent node for a given Glossary Term. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getTermParentUrn( + @Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_TERM_ENTITY_NAME, termUrn, - ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { - GlossaryTermInfo termInfo = new GlossaryTermInfo(response.getAspects() - .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + context.getOperationContext(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + termUrn, + ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME)); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { + GlossaryTermInfo termInfo = + new GlossaryTermInfo( + response + .getAspects() + .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) + .getValue() + .data()); return termInfo.getParentNode(); } return null; @@ -102,13 +120,24 @@ private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext * Returns the urn of the parent node for a given Glossary Node. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getNodeParentUrn( + @Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_NODE_ENTITY_NAME, nodeUrn, - ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { - GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(response.getAspects() - .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + context.getOperationContext(), + Constants.GLOSSARY_NODE_ENTITY_NAME, + nodeUrn, + ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME)); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { + GlossaryNodeInfo nodeInfo = + new GlossaryNodeInfo( + response + .getAspects() + .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) + .getValue() + .data()); return nodeInfo.getParentNode(); } return null; @@ -118,17 +147,21 @@ private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext } /** - * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null otherwise. + * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null + * otherwise. 
*/ @Nullable - public static Urn getParentUrn(@Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + public static Urn getParentUrn( + @Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { switch (urn.getEntityType()) { case Constants.GLOSSARY_TERM_ENTITY_NAME: return getTermParentUrn(urn, context, entityClient); case Constants.GLOSSARY_NODE_ENTITY_NAME: return getNodeParentUrn(urn, context, entityClient); default: - log.warn("Tried to get the parent node urn of a non-glossary entity type: {}", urn.getEntityType()); + log.warn( + "Tried to get the parent node urn of a non-glossary entity type: {}", + urn.getEntityType()); return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index a93c7d5b333da1..3eac819a9cc48d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -1,6 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; +import com.linkedin.businessattribute.BusinessAttributeInfo; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; @@ -13,8 +18,6 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import 
com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.metadata.Constants; @@ -24,116 +27,147 @@ import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.schema.EditableSchemaFieldInfo; import com.linkedin.schema.EditableSchemaMetadata; +import io.datahubproject.metadata.context.OperationContext; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming GlossaryTermService, TagService. @Slf4j public class LabelUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LabelUtils() { } + private LabelUtils() {} public static void removeTermFromResource( + @Nonnull OperationContext opContext, Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermIfExists(terms, labelUrn); - persistAspect(resourceUrn, 
Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + terms, + actor, + entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermIfExists(editableFieldInfo.getGlossaryTerms(), labelUrn); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } public static void removeTagsFromResources( + @Nonnull OperationContext opContext, List tags, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveTagsProposal(tags, resource, actor, entityService)); + changes.add(buildRemoveTagsProposal(opContext, tags, resource, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } 
public static void addTagsToResources( + @Nonnull OperationContext opContext, List tagUrns, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildAddTagsProposal(tagUrns, resource, actor, entityService)); + changes.add(buildAddTagsProposal(opContext, tagUrns, resource, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } public static void removeTermsFromResources( + @Nonnull OperationContext opContext, List termUrns, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveTermsProposal(termUrns, resource, actor, entityService)); + changes.add(buildRemoveTermsProposal(opContext, termUrns, resource, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } public static void addTermsToResources( + @Nonnull OperationContext opContext, List termUrns, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildAddTermsProposal(termUrns, resource, actor, entityService)); + changes.add(buildAddTermsProposal(opContext, termUrns, resource, actor, entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } public static void addTermsToResource( + @Nonnull 
OperationContext opContext, List labelUrns, Urn resourceUrn, String subResource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -141,13 +175,25 @@ public static void addTermsToResource( } addTermsIfNotExists(terms, labelUrns); - persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + terms, + actor, + entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -155,7 +201,13 @@ public static void addTermsToResource( 
editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), labelUrns); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } @@ -181,17 +233,22 @@ private static GlossaryTermAssociationArray removeTermIfExists(GlossaryTerms ter return termArray; } - public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTags( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? 
PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -201,19 +258,23 @@ public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Ur orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateTerms(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTerms( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType() - )) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? 
PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -224,170 +285,237 @@ public static boolean isAuthorizedToUpdateTerms(@Nonnull QueryContext context, U } public static void validateResourceAndLabel( + @Nonnull OperationContext opContext, List labelUrns, Urn resourceUrn, String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, - Boolean isRemoving - ) { + EntityService entityService, + Boolean isRemoving) { for (Urn urn : labelUrns) { - validateResourceAndLabel(urn, resourceUrn, subResource, subResourceType, labelEntityType, entityService, isRemoving); + validateResourceAndLabel( + opContext, + urn, + resourceUrn, + subResource, + subResourceType, + labelEntityType, + entityService, + isRemoving); } } - public static void validateLabel(Urn labelUrn, String labelEntityType, EntityService entityService) { + public static void validateLabel( + @Nonnull OperationContext opContext, + Urn labelUrn, + String labelEntityType, + EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn type does not match entity type %s..", - labelUrn, - labelEntityType)); + throw new IllegalArgumentException( + String.format( + "Failed to validate label with urn %s. Urn type does not match entity type %s..", + labelUrn, labelEntityType)); } - if (!entityService.exists(labelUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); + if (!entityService.exists(opContext, labelUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); } } // TODO: Move this out into a separate utilities class. 
- public static void validateResource(Urn resourceUrn, String subResource, SubResourceType subResourceType, EntityService entityService) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); + public static void validateResource( + @Nonnull OperationContext opContext, + Urn resourceUrn, + String subResource, + SubResourceType subResourceType, + EntityService entityService) { + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); } if ((subResource != null && subResource.length() > 0) || subResourceType != null) { if (subResource == null || subResource.length() == 0) { - throw new IllegalArgumentException(String.format( - "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", resourceUrn, subResourceType)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", + resourceUrn, subResourceType)); } if (subResourceType == null) { - throw new IllegalArgumentException(String.format( - "Failed to updates resource with urn %s. SubResource (%s) provided without a subResourceType.", resourceUrn, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to updates resource with urn %s. 
SubResource (%s) provided without a subResourceType.", + resourceUrn, subResource)); } - validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); + validateSubresourceExists( + opContext, resourceUrn, subResource, subResourceType, entityService); } } public static void validateResourceAndLabel( + @Nonnull OperationContext opContext, Urn labelUrn, Urn resourceUrn, String subResource, SubResourceType subResourceType, String labelEntityType, - EntityService entityService, - Boolean isRemoving - ) { + EntityService entityService, + Boolean isRemoving) { if (!isRemoving) { - validateLabel(labelUrn, labelEntityType, entityService); + validateLabel(opContext, labelUrn, labelEntityType, entityService); } - validateResource(resourceUrn, subResource, subResourceType, entityService); + validateResource(opContext, resourceUrn, subResource, subResourceType, entityService); } private static MetadataChangeProposal buildAddTagsProposal( + @Nonnull OperationContext opContext, List tagUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity - return buildAddTagsToEntityProposal(tagUrns, resource, actor, entityService); + Urn targetUrn = Urn.createFromString(resource.getResourceUrn()); + if (targetUrn.getEntityType().equals(Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME)) { + return buildAddTagsToBusinessAttributeProposal( + opContext, tagUrns, resource, actor, entityService); + } + return buildAddTagsToEntityProposal(opContext, tagUrns, resource, actor, entityService); } else { // Case 2: Adding tags to subresource (e.g. 
schema fields) - return buildAddTagsToSubResourceProposal(tagUrns, resource, actor, entityService); + return buildAddTagsToSubResourceProposal(opContext, tagUrns, resource, actor, entityService); } } private static MetadataChangeProposal buildRemoveTagsProposal( + @Nonnull OperationContext opContext, List tagUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity - return buildRemoveTagsToEntityProposal(tagUrns, resource, actor, entityService); + Urn targetUrn = Urn.createFromString(resource.getResourceUrn()); + if (targetUrn.getEntityType().equals(Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME)) { + return buildRemoveTagsToBusinessAttributeProposal( + opContext, tagUrns, resource, actor, entityService); + } + return buildRemoveTagsToEntityProposal(opContext, tagUrns, resource, actor, entityService); } else { // Case 2: Adding tags to subresource (e.g. 
schema fields) - return buildRemoveTagsToSubResourceProposal(tagUrns, resource, actor, entityService); + return buildRemoveTagsToSubResourceProposal( + opContext, tagUrns, resource, actor, entityService); } } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( + @Nonnull OperationContext opContext, List tagUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } removeTagsIfExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( + @Nonnull OperationContext opContext, List tagUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - entityService, - new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + 
Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildAddTagsToEntityProposal( + @Nonnull OperationContext opContext, List tagUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } addTagsIfNotExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( + @Nonnull OperationContext opContext, List tagUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService 
entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } - private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throws URISyntaxException { + private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) + throws URISyntaxException { if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } @@ -396,7 +524,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throw List tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -415,44 +544,64 @@ private static void 
addTagsIfNotExists(GlobalTags tags, List tagUrns) throw } private static MetadataChangeProposal buildAddTermsProposal( + @Nonnull OperationContext opContext, List termUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity - return buildAddTermsToEntityProposal(termUrns, resource, actor, entityService); + Urn targetUrn = Urn.createFromString(resource.getResourceUrn()); + if (targetUrn.getEntityType().equals(Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME)) { + return buildAddTermsToBusinessAttributeProposal( + opContext, termUrns, resource, actor, entityService); + } + return buildAddTermsToEntityProposal(opContext, termUrns, resource, actor, entityService); } else { // Case 2: Adding terms to subresource (e.g. schema fields) - return buildAddTermsToSubResourceProposal(termUrns, resource, actor, entityService); + return buildAddTermsToSubResourceProposal( + opContext, termUrns, resource, actor, entityService); } } private static MetadataChangeProposal buildRemoveTermsProposal( + @Nonnull OperationContext opContext, List termUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity - return buildRemoveTermsToEntityProposal(termUrns, resource, actor, entityService); + Urn targetUrn = Urn.createFromString(resource.getResourceUrn()); + if (targetUrn.getEntityType().equals(Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME)) { + return buildRemoveTermsToBusinessAttributeProposal( + opContext, termUrns, resource, actor, entityService); + } + return buildRemoveTermsToEntityProposal(opContext, termUrns, resource, actor, 
entityService); } else { // Case 2: Removing terms from subresource (e.g. schema fields) - return buildRemoveTermsToSubResourceProposal(termUrns, resource, actor, entityService); + return buildRemoveTermsToSubResourceProposal( + opContext, termUrns, resource, actor, entityService); } } private static MetadataChangeProposal buildAddTermsToEntityProposal( + @Nonnull OperationContext opContext, List termUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -460,20 +609,28 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal( } addTermsIfNotExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildAddTermsToSubResourceProposal( + @Nonnull OperationContext opContext, List termUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new 
EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -481,42 +638,58 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( + @Nonnull OperationContext opContext, List termUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermsIfExists(terms, termUrns); - return 
buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( + @Nonnull OperationContext opContext, List termUrns, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermsIfExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static void addTermsIfNotExists(GlossaryTerms terms, List termUrns) @@ -547,7 +720,8 @@ private static void addTermsIfNotExists(GlossaryTerms terms, List termUrns) } } - private static GlossaryTermAssociationArray 
removeTermsIfExists(GlossaryTerms terms, List termUrns) { + private static GlossaryTermAssociationArray removeTermsIfExists( + GlossaryTerms terms, List termUrns) { if (!terms.hasTerms()) { terms.setTerms(new GlossaryTermAssociationArray()); } @@ -557,4 +731,105 @@ private static GlossaryTermAssociationArray removeTermsIfExists(GlossaryTerms te } return termAssociationArray; } + + private static MetadataChangeProposal buildAddTagsToBusinessAttributeProposal( + OperationContext opContext, + List tagUrns, + ResourceRefInput resource, + Urn actor, + EntityService entityService) + throws URISyntaxException { + BusinessAttributeInfo businessAttributeInfo = + (BusinessAttributeInfo) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + entityService, + new GlobalTags()); + + if (!businessAttributeInfo.hasGlobalTags()) { + businessAttributeInfo.setGlobalTags(new GlobalTags()); + } + addTagsIfNotExists(businessAttributeInfo.getGlobalTags(), tagUrns); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo); + } + + private static MetadataChangeProposal buildAddTermsToBusinessAttributeProposal( + OperationContext opContext, + List termUrns, + ResourceRefInput resource, + Urn actor, + EntityService entityService) + throws URISyntaxException { + BusinessAttributeInfo businessAttributeInfo = + (BusinessAttributeInfo) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + entityService, + new GlossaryTerms()); + if (!businessAttributeInfo.hasGlossaryTerms()) { + businessAttributeInfo.setGlossaryTerms(new GlossaryTerms()); + } + businessAttributeInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); + addTermsIfNotExists(businessAttributeInfo.getGlossaryTerms(), termUrns); + return 
buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo); + } + + private static MetadataChangeProposal buildRemoveTagsToBusinessAttributeProposal( + OperationContext opContext, + List tagUrns, + ResourceRefInput resource, + Urn actor, + EntityService entityService) { + BusinessAttributeInfo businessAttributeInfo = + (BusinessAttributeInfo) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + entityService, + new GlobalTags()); + + if (!businessAttributeInfo.hasGlobalTags()) { + businessAttributeInfo.setGlobalTags(new GlobalTags()); + } + removeTagsIfExists(businessAttributeInfo.getGlobalTags(), tagUrns); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo); + } + + private static MetadataChangeProposal buildRemoveTermsToBusinessAttributeProposal( + OperationContext opContext, + List termUrns, + ResourceRefInput resource, + Urn actor, + EntityService entityService) { + BusinessAttributeInfo businessAttributeInfo = + (BusinessAttributeInfo) + EntityUtils.getAspectFromEntity( + opContext, + resource.getResourceUrn(), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + entityService, + new GlossaryTerms()); + if (!businessAttributeInfo.hasGlossaryTerms()) { + businessAttributeInfo.setGlossaryTerms(new GlossaryTerms()); + } + removeTermsIfExists(businessAttributeInfo.getGlossaryTerms(), termUrns); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + businessAttributeInfo); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index 
9ec0f9b8e6070d..a2d4692db5b7b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; @@ -9,59 +12,73 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.entity.EntityUtils; +import io.datahubproject.metadata.context.OperationContext; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class LinkUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LinkUtils() { } + private LinkUtils() {} public static void addLink( + 
@Nonnull OperationContext opContext, String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); addLink(institutionalMemoryAspect, linkUrl, linkLabel, actor); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } public static void removeLink( + @Nonnull OperationContext opContext, String linkUrl, Urn resourceUrn, Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); removeLink(institutionalMemoryAspect, linkUrl); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + opContext, + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } - private static void addLink(InstitutionalMemory 
institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { + private static void addLink( + InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { if (!institutionalMemoryAspect.hasElements()) { institutionalMemoryAspect.setElements(new InstitutionalMemoryMetadataArray()); } @@ -90,10 +107,12 @@ private static void removeLink(InstitutionalMemory institutionalMemoryAspect, St } public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -104,21 +123,25 @@ public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( + @Nonnull OperationContext opContext, String linkUrl, Urn resourceUrn, - EntityService entityService - ) { + EntityService entityService) { try { new Url(linkUrl); } catch (Exception e) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Expected a corp group urn.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Expected a corp group urn.", + resourceUrn)); } - if (!entityService.exists(resourceUrn)) { + if (!entityService.exists(opContext, resourceUrn, true)) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. 
Resource does not exist.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Resource does not exist.", + resourceUrn)); } return true; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 72339958044231..29056eb71a7a3a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -13,6 +15,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.OwnerEntityType; import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.OwnershipType; @@ -22,257 +25,307 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.List; -import java.util.Optional; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming from OwnerService @Slf4j public class OwnerUtils { - private static 
final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static final String SYSTEM_ID = "__system__"; - private OwnerUtils() { } + private OwnerUtils() {} public static void addOwnersToResources( - List owners, - List resources, - Urn actor, - EntityService entityService - ) { + @Nonnull OperationContext opContext, + List ownerInputs, + List resourceRefs, + Urn actorUrn, + EntityService entityService) { final List changes = new ArrayList<>(); - for (ResourceRefInput resource : resources) { - changes.add(buildAddOwnersProposal(owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); + for (ResourceRefInput resource : resourceRefs) { + changes.add( + buildAddOwnersProposal( + opContext, + ownerInputs, + UrnUtils.getUrn(resource.getResourceUrn()), + actorUrn, + entityService)); } - EntityUtils.ingestChangeProposals(changes, entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actorUrn, false); } public static void removeOwnersFromResources( - List ownerUrns, Optional maybeOwnershipTypeUrn, List resources, + @Nonnull OperationContext opContext, + List ownerUrns, + @Nullable Urn ownershipTypeUrn, + List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveOwnersProposal(ownerUrns, maybeOwnershipTypeUrn, UrnUtils.getUrn(resource.getResourceUrn()), - actor, entityService)); + changes.add( + buildRemoveOwnersProposal( + opContext, + ownerUrns, + ownershipTypeUrn, + UrnUtils.getUrn(resource.getResourceUrn()), + actor, + entityService)); } - EntityUtils.ingestChangeProposals(changes, 
entityService, actor, false); + EntityUtils.ingestChangeProposals(opContext, changes, entityService, actor, false); } - - static MetadataChangeProposal buildAddOwnersProposal(List owners, Urn resourceUrn, EntityService entityService) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, entityService, - new Ownership()); + static MetadataChangeProposal buildAddOwnersProposal( + @Nonnull OperationContext opContext, + List owners, + Urn resourceUrn, + Urn actor, + EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); + ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); for (OwnerInput input : owners) { - addOwner(ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), UrnUtils.getUrn(input.getOwnershipTypeUrn())); + addOwnerToAspect( + ownershipAspect, + UrnUtils.getUrn(input.getOwnerUrn()), + input.getType(), + UrnUtils.getUrn(input.getOwnershipTypeUrn())); } - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } public static MetadataChangeProposal buildRemoveOwnersProposal( - List ownerUrns, Optional maybeOwnershipTypeUrn, Urn resourceUrn, + @Nonnull OperationContext opContext, + List ownerUrns, + @Nullable Urn ownershipTypeUrn, + Urn resourceUrn, Urn actor, - EntityService entityService - ) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, - entityService, - new Ownership()); + EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + opContext, + resourceUrn.toString(), + 
Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); - removeOwnersIfExists(ownershipAspect, ownerUrns, maybeOwnershipTypeUrn); - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + removeOwnersIfExists(ownershipAspect, ownerUrns, ownershipTypeUrn); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } - private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { + private static void addOwnerToAspect( + Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipTypeUrn) { if (!ownershipAspect.hasOwners()) { ownershipAspect.setOwners(new OwnerArray()); } - final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); - ownerArray.removeIf(owner -> { - // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(ownershipUrn); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(ownershipUrn.toString()); - }); + OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); + removeExistingOwnerIfExists(ownerArray, ownerUrn, ownershipTypeUrn); Owner newOwner = new Owner(); // For backwards compatibility we have to always set the deprecated type. // If the type exists we assume it's an old ownership type that we can map to. // Else if it's a net new custom ownership type set old type to CUSTOM. - com.linkedin.common.OwnershipType gmsType = type != null ? 
com.linkedin.common.OwnershipType.valueOf(type.toString()) - : com.linkedin.common.OwnershipType.CUSTOM; + com.linkedin.common.OwnershipType gmsType = + type != null + ? com.linkedin.common.OwnershipType.valueOf(type.toString()) + : com.linkedin.common.OwnershipType.CUSTOM; newOwner.setType(gmsType); - newOwner.setTypeUrn(ownershipUrn); + newOwner.setTypeUrn(ownershipTypeUrn); newOwner.setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)); newOwner.setOwner(ownerUrn); ownerArray.add(newOwner); ownershipAspect.setOwners(ownerArray); } - private static void removeOwnersIfExists(Ownership ownership, List ownerUrns, - Optional maybeOwnershipTypeUrn) { - if (!ownership.hasOwners()) { - ownership.setOwners(new OwnerArray()); + private static void removeExistingOwnerIfExists( + OwnerArray ownerArray, Urn ownerUrn, Urn ownershipTypeUrn) { + ownerArray.removeIf( + owner -> { + // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) + return isOwnerEqual(owner, ownerUrn, ownershipTypeUrn); + }); + } + + public static boolean isOwnerEqual( + @Nonnull Owner owner, @Nonnull Urn ownerUrn, @Nullable Urn ownershipTypeUrn) { + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(ownershipTypeUrn); + } + if (ownershipTypeUrn == null) { + return true; + } + // Fall back to mapping deprecated type to the new ownership entity + return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(ownershipTypeUrn.toString()); + } + + private static void removeOwnersIfExists( + Ownership ownershipAspect, List ownerUrns, Urn ownershipTypeUrn) { + if (!ownershipAspect.hasOwners()) { + ownershipAspect.setOwners(new OwnerArray()); } - OwnerArray ownerArray = ownership.getOwners(); + OwnerArray ownerArray = ownershipAspect.getOwners(); for (Urn ownerUrn : ownerUrns) { - if (maybeOwnershipTypeUrn.isPresent()) { - ownerArray.removeIf(owner 
-> { - // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(maybeOwnershipTypeUrn.get().toString()); - }); - } else { - ownerArray.removeIf(owner -> owner.getOwner().equals(ownerUrn)); - } + removeExistingOwnerIfExists(ownerArray, ownerUrn, ownershipTypeUrn); } } - public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - resourceUrn.getEntityType(), - resourceUrn.toString(), - orPrivilegeGroups); + public static void validateAuthorizedToUpdateOwners( + @Nonnull QueryContext context, Urn resourceUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())))); + + boolean authorized = + AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + if (!authorized) { + throw new AuthorizationException( + "Unauthorized to update owners. 
Please contact your DataHub administrator."); + } } - public static Boolean validateAddOwnerInput( + public static void validateAddOwnerInput( + @Nonnull OperationContext opContext, List owners, Urn resourceUrn, - EntityService entityService - ) { + EntityService entityService) { for (OwnerInput owner : owners) { - boolean result = validateAddOwnerInput(owner, resourceUrn, entityService); - if (!result) { - return false; - } + validateAddOwnerInput(opContext, owner, resourceUrn, entityService); } - return true; } - public static Boolean validateAddOwnerInput( + public static void validateAddOwnerInput( + @Nonnull OperationContext opContext, OwnerInput owner, Urn resourceUrn, - EntityService entityService - ) { + EntityService entityService) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); } - validateOwner(owner, entityService); - - return true; + validateOwner(opContext, owner, entityService); } public static void validateOwner( - OwnerInput owner, - EntityService entityService - ) { + @Nonnull OperationContext opContext, OwnerInput owner, EntityService entityService) { OwnerEntityType ownerEntityType = owner.getOwnerEntityType(); Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn()); - if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) + && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). 
Expected a corp group urn, found %s", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp group urn, found %s", + ownerUrn)); } - if (OwnerEntityType.CORP_USER.equals(ownerEntityType) && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_USER.equals(ownerEntityType) + && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp user urn, found %s.", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp user urn, found %s.", + ownerUrn)); } - if (!entityService.exists(ownerUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Owner with urn %s does not exist.", ownerUrn)); + if (!entityService.exists(opContext, ownerUrn, true)) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Owner with urn %s does not exist.", + ownerUrn)); } - if (owner.getOwnershipTypeUrn() != null && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Custom Ownership type with " - + "urn %s does not exist.", owner.getOwnershipTypeUrn())); + if (owner.getOwnershipTypeUrn() != null + && !entityService.exists(opContext, UrnUtils.getUrn(owner.getOwnershipTypeUrn()), true)) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Custom Ownership type with " + + "urn %s does not exist.", + owner.getOwnershipTypeUrn())); } if (owner.getType() == null && owner.getOwnershipTypeUrn() == null) { - throw new IllegalArgumentException("Failed to change ownership for resource(s). 
Expected either " - + "type or ownershipTypeUrn to be specified."); + throw new IllegalArgumentException( + "Failed to change ownership for resource(s). Expected either " + + "type or ownershipTypeUrn to be specified."); } } - public static Boolean validateRemoveInput( - Urn resourceUrn, - EntityService entityService - ) { - if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + public static void validateRemoveInput( + @Nonnull OperationContext opContext, Urn resourceUrn, EntityService entityService) { + if (!entityService.exists(opContext, resourceUrn, true)) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); } - return true; } public static void addCreatorAsOwner( QueryContext context, String urn, OwnerEntityType ownerEntityType, - OwnershipType ownershipType, - EntityService entityService) { + EntityService entityService) { try { Urn actorUrn = CorpuserUrn.createFromString(context.getActorUrn()); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!entityService.exists( + context.getOperationContext(), + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())), + true)) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name()); - if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) { - throw new RuntimeException(String.format("Unknown ownership type urn %s", ownershipTypeUrn)); + if (!entityService.exists( + context.getOperationContext(), UrnUtils.getUrn(ownershipTypeUrn), true)) { + throw new RuntimeException( + String.format("Unknown ownership type urn %s", ownershipTypeUrn)); } addOwnersToResources( - ImmutableList.of(new OwnerInput(actorUrn.toString(), ownerEntityType, 
ownershipType, ownershipTypeUrn)), + context.getOperationContext(), + ImmutableList.of( + new OwnerInput( + actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), ImmutableList.of(new ResourceRefInput(urn, null, null)), actorUrn, - entityService - ); + entityService); } catch (Exception e) { log.error(String.format("Failed to add creator as owner of tag %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java index f740836694dbe4..dd8aaa802864f8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java @@ -1,32 +1,38 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; + import com.linkedin.common.Siblings; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; - -import javax.annotation.Nonnull; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import javax.annotation.Nonnull; public class SiblingsUtils { - private SiblingsUtils() { } + private SiblingsUtils() {} - public static List getSiblingUrns(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { - final Siblings siblingAspectOfEntity = (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + public static List getSiblingUrns( + @Nonnull OperationContext opContext, + @Nonnull final Urn entityUrn, + @Nonnull final EntityService entityService) { + final Siblings 
siblingAspectOfEntity = + (Siblings) entityService.getLatestAspect(opContext, entityUrn, SIBLINGS_ASPECT_NAME); if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { return siblingAspectOfEntity.getSiblings(); } return new ArrayList<>(); } - public static Optional getNextSiblingUrn(@Nonnull final List siblingUrns, @Nonnull final HashSet usedUrns) { - final List unusedSiblingUrns = siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); + public static Optional getNextSiblingUrn( + @Nonnull final List siblingUrns, @Nonnull final HashSet usedUrns) { + final List unusedSiblingUrns = + siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); return unusedSiblingUrns.stream().findFirst(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/StructuredPropertyUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/StructuredPropertyUtils.java new file mode 100644 index 00000000000000..8c4e70fdac6055 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/StructuredPropertyUtils.java @@ -0,0 +1,22 @@ +package com.linkedin.datahub.graphql.resolvers.mutate.util; + +import com.linkedin.datahub.graphql.generated.PropertyValueInput; +import com.linkedin.structured.PrimitivePropertyValue; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class StructuredPropertyUtils { + + private StructuredPropertyUtils() {} + + @Nullable + public static PrimitivePropertyValue mapPropertyValueInput( + @Nonnull final PropertyValueInput valueInput) { + if (valueInput.getStringValue() != null) { + return PrimitivePropertyValue.create(valueInput.getStringValue()); + } else if (valueInput.getNumberValue() != null) { + return PrimitivePropertyValue.create(valueInput.getNumberValue().doubleValue()); + } + return null; + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java index c0fe697c6654c3..6ef3222bc068f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -10,8 +17,7 @@ import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -30,22 +36,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for reporting Asset Operations - */ +/** Resolver used for reporting Asset Operations */ @Slf4j @RequiredArgsConstructor public class ReportOperationResolver implements DataFetcher> { - private static final List SUPPORTED_ENTITY_TYPES = ImmutableList.of( - DATASET_ENTITY_NAME - ); + private static final List SUPPORTED_ENTITY_TYPES = ImmutableList.of(DATASET_ENTITY_NAME); private final EntityClient _entityClient; @@ -53,32 +49,38 @@ public class ReportOperationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final ReportOperationInput input = bindArgument(environment.getArgument("input"), ReportOperationInput.class); - - return CompletableFuture.supplyAsync(() -> { - - Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - - if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - validateInput(entityUrn, input); - - try { - // Create an MCP to emit the operation - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, OPERATION_ASPECT_NAME, - mapOperation(input, context)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to report operation. 
{}", e.getMessage()); - throw new RuntimeException("Failed to report operation", e); - } - }); + final ReportOperationInput input = + bindArgument(environment.getArgument("input"), ReportOperationInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + Urn entityUrn = UrnUtils.getUrn(input.getUrn()); + + if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + validateInput(entityUrn, input); + + try { + // Create an MCP to emit the operation + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + entityUrn, OPERATION_ASPECT_NAME, mapOperation(input, context)); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + return true; + } catch (Exception e) { + log.error("Failed to report operation. {}", e.getMessage()); + throw new RuntimeException("Failed to report operation", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private Operation mapOperation(final ReportOperationInput input, final QueryContext context) throws URISyntaxException { + private Operation mapOperation(final ReportOperationInput input, final QueryContext context) + throws URISyntaxException { final Operation result = new Operation(); result.setActor(UrnUtils.getUrn(context.getActorUrn())); @@ -86,13 +88,17 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont result.setCustomOperationType(input.getCustomOperationType(), SetMode.IGNORE_NULL); result.setNumAffectedRows(input.getNumAffectedRows(), SetMode.IGNORE_NULL); - long timestampMillis = input.getTimestampMillis() != null ? input.getTimestampMillis() : System.currentTimeMillis(); + long timestampMillis = + input.getTimestampMillis() != null + ? 
input.getTimestampMillis() + : System.currentTimeMillis(); result.setLastUpdatedTimestamp(timestampMillis); result.setTimestampMillis(timestampMillis); result.setSourceType(OperationSourceType.valueOf(input.getSourceType().toString())); if (input.getPartition() != null) { - result.setPartitionSpec(new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); + result.setPartitionSpec( + new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); } if (input.getCustomProperties() != null) { @@ -102,7 +108,8 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont return result; } - private StringMap mapCustomProperties(final List properties) throws URISyntaxException { + private StringMap mapCustomProperties(final List properties) + throws URISyntaxException { final StringMap result = new StringMap(); for (StringMapEntryInput entry : properties) { result.put(entry.getKey(), entry.getValue()); @@ -113,16 +120,21 @@ private StringMap mapCustomProperties(final List properties private void validateInput(final Urn entityUrn, final ReportOperationInput input) { if (!SUPPORTED_ENTITY_TYPES.contains(entityUrn.getEntityType())) { throw new DataHubGraphQLException( - String.format("Unable to report operation. Invalid entity type %s provided.", entityUrn.getEntityType()), + String.format( + "Unable to report operation. 
Invalid entity type %s provided.", + entityUrn.getEntityType()), DataHubGraphQLErrorCode.BAD_REQUEST); } } - private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToReportOperationForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -131,4 +143,4 @@ private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, resourceUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java index 4cfe58072aae9d..8d48e62c11cdf3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java @@ -1,13 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import 
com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.metadata.service.OwnershipTypeService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -16,17 +19,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class CreateOwnershipTypeResolver implements DataFetcher> { +public class CreateOwnershipTypeResolver + implements DataFetcher> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateOwnershipTypeInput input = bindArgument(environment.getArgument("input"), CreateOwnershipTypeInput.class); @@ -36,19 +38,27 @@ public CompletableFuture get(DataFetchingEnvironment enviro "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = _ownershipTypeService.createOwnershipType(input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - return createOwnershipType(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Urn urn = + _ownershipTypeService.createOwnershipType( + context.getOperationContext(), + input.getName(), + input.getDescription(), + System.currentTimeMillis()); + return createOwnershipType(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private OwnershipTypeEntity createOwnershipType(@Nonnull final Urn urn, - @Nonnull final CreateOwnershipTypeInput input) { + private OwnershipTypeEntity createOwnershipType( + @Nonnull final Urn urn, @Nonnull final CreateOwnershipTypeInput input) { return OwnershipTypeEntity.builder() .setUrn(urn.toString()) .setType(EntityType.CUSTOM_OWNERSHIP_TYPE) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java index 87cf70193d7fd7..8f76cd148e4a79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java @@ -4,6 +4,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.metadata.service.OwnershipTypeService; import graphql.schema.DataFetcher; @@ -12,7 +13,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteOwnershipTypeResolver implements DataFetcher> { @@ -26,21 +26,28 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn urn = UrnUtils.getUrn(ownershipTypeUrn); // By default, delete references final boolean deleteReferences = - environment.getArgument("deleteReferences") == null ? true : environment.getArgument("deleteReferences"); + environment.getArgument("deleteReferences") == null + ? true + : environment.getArgument("deleteReferences"); if (!AuthorizationUtils.canManageOwnershipTypes(context)) { throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.deleteOwnershipType(urn, deleteReferences, context.getAuthentication()); - log.info(String.format("Successfully deleted ownership type %s with urn", urn)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _ownershipTypeService.deleteOwnershipType( + context.getOperationContext(), urn, deleteReferences); + log.info(String.format("Successfully deleted ownership type %s with urn", urn)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 70441815f0a747..da0d5dd07a94f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -1,15 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -24,18 +26,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListOwnershipTypesResolver implements - DataFetcher> { +public class ListOwnershipTypesResolver + implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; - 
private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,43 +42,51 @@ public class ListOwnershipTypesResolver implements private final EntityClient _entityClient; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListOwnershipTypesInput input = bindArgument(environment.getArgument("input"), - ListOwnershipTypesInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - + final ListOwnershipTypesInput input = + bindArgument(environment.getArgument("input"), ListOwnershipTypesInput.class); - try { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final List filters = + input.getFilters() == null ? 
Collections.emptyList() : input.getFilters(); - final SearchResult gmsResult = _entityClient.search( - Constants.OWNERSHIP_TYPE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { - final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setOwnershipTypes(mapUnresolvedOwnershipTypes(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list custom ownership types", e); - } + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + query, + buildFilter( + filters, + Collections.emptyList(), + context.getOperationContext().getAspectRetriever()), + Collections.singletonList(DEFAULT_SORT_CRITERION), + start, + count); - }); + final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setOwnershipTypes( + mapUnresolvedOwnershipTypes( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list custom ownership types", e); + } + }, + this.getClass().getSimpleName(), + "get"); } private List mapUnresolvedOwnershipTypes(List entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java index 43fd2493043975..45a20669558314 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.ownership; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.UpdateOwnershipTypeInput; @@ -14,20 +16,20 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class UpdateOwnershipTypeResolver implements DataFetcher> { +public class UpdateOwnershipTypeResolver + implements DataFetcher> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final UpdateOwnershipTypeInput input = @@ -39,29 +41,39 @@ public CompletableFuture 
get(DataFetchingEnvironment enviro "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.updateOwnershipType(urn, input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); - return getOwnershipType(urn, context.getAuthentication()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _ownershipTypeService.updateOwnershipType( + context.getOperationContext(), + urn, + input.getName(), + input.getDescription(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); + return getOwnershipType(context, urn); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private OwnershipTypeEntity getOwnershipType(@Nonnull final Urn urn, - @Nonnull final Authentication authentication) { - final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); + private OwnershipTypeEntity getOwnershipType( + @Nullable QueryContext context, @Nonnull final Urn urn) { + final EntityResponse maybeResponse = + _ownershipTypeService.getOwnershipTypeEntityResponse(context.getOperationContext(), urn); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to Ownership Type with urn %s. 
Failed to find Ownership Type in GMS.", + String.format( + "Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", urn)); } - return OwnershipTypeMapper.map(maybeResponse); + return OwnershipTypeMapper.map(context, maybeResponse); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java index 485d40e60547e8..f7b717f865035f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java @@ -3,16 +3,14 @@ import com.datahub.authorization.AuthorizerChain; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Resolver responsible for hard deleting a particular DataHub access control policy. - */ +/** Resolver responsible for hard deleting a particular DataHub access control policy. 
*/ public class DeletePolicyResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -27,18 +25,26 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final String policyUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(policyUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return policyUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against policy with urn %s", policyUrn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(context.getOperationContext(), urn); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return policyUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against policy with urn %s", policyUrn), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 11f7793db82c8b..a9097fa68a07de 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -1,30 +1,31 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.GetGrantedPrivilegesInput; import com.linkedin.datahub.graphql.generated.Privileges; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - /** - * Resolver to support the getGrantedPrivileges end point - * Fetches all privileges that are granted for the given actor for the given resource (optional) + * Resolver to support the getGrantedPrivileges end point Fetches all privileges that are granted + * for the given actor for the given resource (optional) */ public class GetGrantedPrivilegesResolver implements DataFetcher> 
{ @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final GetGrantedPrivilegesInput input = @@ -33,22 +34,29 @@ public CompletableFuture get(final DataFetchingEnvironment environme if (!isAuthorized(context, actor)) { throw new AuthorizationException("Unauthorized to get privileges for the given author."); } - final Optional resourceSpec = Optional.ofNullable(input.getResourceSpec()) - .map(spec -> new EntitySpec(EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); + final Optional resourceSpec = + Optional.ofNullable(input.getResourceSpec()) + .map( + spec -> + new EntitySpec( + EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); if (context.getAuthorizer() instanceof AuthorizerChain) { - DataHubAuthorizer dataHubAuthorizer = ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); + DataHubAuthorizer dataHubAuthorizer = + ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); List privileges = dataHubAuthorizer.getGrantedPrivileges(actor, resourceSpec); - return CompletableFuture.supplyAsync(() -> Privileges.builder() - .setPrivileges(privileges) - .build()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> Privileges.builder().setPrivileges(privileges).build(), + this.getClass().getSimpleName(), + "get"); } throw new UnsupportedOperationException( - String.format("GetGrantedPrivileges function is not supported on authorizer of type %s", + String.format( + "GetGrantedPrivileges function is not supported on authorizer of type %s", context.getAuthorizer().getClass().getSimpleName())); } private boolean isAuthorized(final QueryContext context, final String actor) { return actor.equals(context.getActorUrn()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index 516d6fa2d31372..ce11451aa1913f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -1,22 +1,30 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.PolicyFetcher; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.AndFilterInput; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListPoliciesInput; import com.linkedin.datahub.graphql.generated.ListPoliciesResult; import com.linkedin.datahub.graphql.generated.Policy; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.policy.mappers.PolicyInfoPolicyMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - +@Slf4j public class ListPoliciesResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -30,42 +38,58 @@ public ListPoliciesResolver(final EntityClient entityClient) { } @Override - public 
CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (PolicyAuthUtils.canManagePolicies(context)) { - final ListPoliciesInput input = bindArgument(environment.getArgument("input"), ListPoliciesInput.class); + final ListPoliciesInput input = + bindArgument(environment.getArgument("input"), ListPoliciesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final List filters = + input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); + log.debug( + "User {} listing policies with filters {}", context.getActorUrn(), filters.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all policy Urns. - final PolicyFetcher.PolicyFetchResult policyFetchResult = - _policyFetcher.fetchPolicies(start, count, query, context.getAuthentication()); + final Filter filter = + ResolverUtils.buildFilter( + facetFilters, + Collections.emptyList(), + context.getOperationContext().getAspectRetriever()); - // Now that we have entities we can bind this to a result. 
- final ListPoliciesResult result = new ListPoliciesResult(); - result.setStart(start); - result.setCount(count); - result.setTotal(policyFetchResult.getTotal()); - result.setPolicies(mapEntities(policyFetchResult.getPolicies())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list policies", e); - } - }); + return _policyFetcher + .fetchPolicies(context.getOperationContext(), start, query, count, filter) + .thenApply( + policyFetchResult -> { + final ListPoliciesResult result = new ListPoliciesResult(); + result.setStart(start); + result.setCount(count); + result.setTotal(policyFetchResult.getTotal()); + result.setPolicies(mapEntities(context, policyFetchResult.getPolicies())); + return result; + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - private List mapEntities(final List policies) { - return policies.stream().map(policy -> { - Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); - mappedPolicy.setUrn(policy.getUrn().toString()); - return mappedPolicy; - }).collect(Collectors.toList()); + private static List mapEntities( + @Nullable QueryContext context, final List policies) { + return policies.stream() + .map( + policy -> { + Policy mappedPolicy = PolicyInfoPolicyMapper.map(context, policy.getPolicyInfo()); + mappedPolicy.setUrn(policy.getUrn().toString()); + return mappedPolicy; + }) + .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java index dcc5d1fd23302e..7babe63745f727 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java @@ -1,19 +1,19 @@ package com.linkedin.datahub.graphql.resolvers.policy; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; + +import com.datahub.authorization.AuthUtil; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.metadata.authorization.PoliciesConfig; +import java.util.List; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class PolicyAuthUtils { static boolean canManagePolicies(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), authorizer); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } - private PolicyAuthUtils() { } + private PolicyAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java index 6dcc143a1a3af4..5fd6f0917a25ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.AuthorizerChain; import 
com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.PolicyUpdateInput; import com.linkedin.datahub.graphql.resolvers.policy.mappers.PolicyUpdateInputInfoMapper; @@ -16,10 +20,6 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - public class UpsertPolicyResolver implements DataFetcher> { private static final String POLICY_ENTITY_NAME = "dataHubPolicy"; @@ -38,17 +38,20 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final Optional policyUrn = Optional.ofNullable(environment.getArgument("urn")); - final PolicyUpdateInput input = bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); + final PolicyUpdateInput input = + bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); // Finally, create the MetadataChangeProposal. final MetadataChangeProposal proposal; - final DataHubPolicyInfo info = PolicyUpdateInputInfoMapper.map(input); + final DataHubPolicyInfo info = PolicyUpdateInputInfoMapper.map(context, input); info.setLastUpdatedTimestamp(System.currentTimeMillis()); if (policyUrn.isPresent()) { // Update existing policy - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); } else { // Create new policy // Since we are creating a new Policy, we need to generate a unique UUID. 
@@ -58,21 +61,31 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Create the Policy key. final DataHubPolicyKey key = new DataHubPolicyKey(); key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithKey( + key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); } - return CompletableFuture.supplyAsync(() -> { - try { - String urn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return urn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + String urn = + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return urn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java index b9a6bf07be8c86..2eaa08069a688e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java @@ -2,6 +2,8 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.Policy; import com.linkedin.datahub.graphql.generated.PolicyMatchCondition; import com.linkedin.datahub.graphql.generated.PolicyMatchCriterion; @@ -9,7 +11,6 @@ import com.linkedin.datahub.graphql.generated.PolicyMatchFilter; import com.linkedin.datahub.graphql.generated.PolicyState; import com.linkedin.datahub.graphql.generated.PolicyType; -import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.ResourceFilter; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -19,21 +20,23 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; /** - * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link com.linkedin.datahub.graphql.generated.Policy}. + * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link + * com.linkedin.datahub.graphql.generated.Policy}. 
*/ public class PolicyInfoPolicyMapper implements ModelMapper { public static final PolicyInfoPolicyMapper INSTANCE = new PolicyInfoPolicyMapper(); - public static Policy map(@Nonnull final DataHubPolicyInfo policyInfo) { - return INSTANCE.apply(policyInfo); + public static Policy map( + @Nullable QueryContext context, @Nonnull final DataHubPolicyInfo policyInfo) { + return INSTANCE.apply(context, policyInfo); } @Override - public Policy apply(DataHubPolicyInfo info) { + public Policy apply(@Nullable QueryContext context, DataHubPolicyInfo info) { final Policy result = new Policy(); result.setDescription(info.getDescription()); // Careful - we assume no other Policy types or states have been ingested using a backdoor. @@ -44,7 +47,7 @@ public Policy apply(DataHubPolicyInfo info) { result.setActors(mapActors(info.getActors())); result.setEditable(info.isEditable()); if (info.hasResources()) { - result.setResources(mapResources(info.getResources())); + result.setResources(mapResources(context, info.getResources())); } return result; } @@ -56,21 +59,26 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { result.setResourceOwners(actorFilter.isResourceOwners()); UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + 
actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } - private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) { + private static ResourceFilter mapResources( + @Nullable QueryContext context, final DataHubResourceFilter resourceFilter) { final ResourceFilter result = new ResourceFilter(); result.setAllResources(resourceFilter.isAllResources()); if (resourceFilter.hasType()) { @@ -80,29 +88,40 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) result.setResources(resourceFilter.getResources()); } if (resourceFilter.hasFilter()) { - result.setFilter(mapFilter(resourceFilter.getFilter())); + result.setFilter(mapFilter(context, resourceFilter.getFilter())); } return result; } - private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { + private static PolicyMatchFilter mapFilter( + @Nullable QueryContext context, final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(v -> mapValue(context, v)) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + 
.collect(Collectors.toList())) .build(); } - private PolicyMatchCriterionValue mapValue(final String value) { + private static PolicyMatchCriterionValue mapValue( + @Nullable QueryContext context, final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(context, urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java index cb323b60dd4653..0397f764f61bdd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java @@ -3,6 +3,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilterInput; import com.linkedin.datahub.graphql.generated.PolicyMatchFilterInput; import com.linkedin.datahub.graphql.generated.PolicyUpdateInput; @@ -18,21 +19,22 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; - -/** - * Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. 
- */ -public class PolicyUpdateInputInfoMapper implements ModelMapper { +/** Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. */ +public class PolicyUpdateInputInfoMapper + implements ModelMapper { public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper(); - public static DataHubPolicyInfo map(@Nonnull final PolicyUpdateInput policyInput) { - return INSTANCE.apply(policyInput); + public static DataHubPolicyInfo map( + @Nullable QueryContext context, @Nonnull final PolicyUpdateInput policyInput) { + return INSTANCE.apply(context, policyInput); } @Override - public DataHubPolicyInfo apply(@Nonnull final PolicyUpdateInput policyInput) { + public DataHubPolicyInfo apply( + @Nullable QueryContext queryContext, @Nonnull final PolicyUpdateInput policyInput) { final DataHubPolicyInfo result = new DataHubPolicyInfo(); result.setDescription(policyInput.getDescription()); result.setType(policyInput.getType().toString()); @@ -52,13 +54,21 @@ private DataHubActorFilter mapActors(final ActorFilterInput actorInput) { result.setAllUsers(actorInput.getAllUsers()); result.setResourceOwners(actorInput.getResourceOwners()); if (actorInput.getResourceOwnersTypes() != null) { - result.setResourceOwnersTypes(new UrnArray(actorInput.getResourceOwnersTypes().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setResourceOwnersTypes( + new UrnArray( + actorInput.getResourceOwnersTypes().stream() + .map(this::createUrn) + .collect(Collectors.toList()))); } if (actorInput.getGroups() != null) { - result.setGroups(new UrnArray(actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setGroups( + new UrnArray( + actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); } if (actorInput.getUsers() != null) { - result.setUsers(new UrnArray(actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setUsers( + 
new UrnArray( + actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); } return result; } @@ -83,19 +93,26 @@ private DataHubResourceFilter mapResources(final ResourceFilterInput resourceInp } private PolicyMatchFilter mapFilter(final PolicyMatchFilterInput filter) { - return new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray(filter.getCriteria() - .stream() - .map(criterion -> new PolicyMatchCriterion().setField(criterion.getField()) - .setValues(new StringArray(criterion.getValues())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name()))) - .collect(Collectors.toList()))); + return new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + filter.getCriteria().stream() + .map( + criterion -> + new PolicyMatchCriterion() + .setField(criterion.getField()) + .setValues(new StringArray(criterion.getValues())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name()))) + .collect(Collectors.toList()))); } private Urn createUrn(String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urnStr %s into an URN object", urnStr), e); + throw new RuntimeException( + String.format("Failed to convert urnStr %s into an URN object", urnStr), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java index 524caf14e9afe4..8b4253501dedc7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import 
com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreatePostInput; import com.linkedin.datahub.graphql.generated.PostContentType; @@ -18,16 +21,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreatePostResolver implements DataFetcher> { private final PostService _postService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { @@ -35,7 +36,8 @@ public CompletableFuture get(final DataFetchingEnvironment environment) "Unauthorized to create posts. Please contact your DataHub administrator if this needs corrective action."); } - final CreatePostInput input = bindArgument(environment.getArgument("input"), CreatePostInput.class); + final CreatePostInput input = + bindArgument(environment.getArgument("input"), CreatePostInput.class); final PostType type = input.getPostType(); final UpdatePostContentInput content = input.getContent(); final PostContentType contentType = content.getContentType(); @@ -45,16 +47,24 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final UpdateMediaInput updateMediaInput = content.getMedia(); final Authentication authentication = context.getAuthentication(); - Media media = updateMediaInput == null ? 
null - : _postService.mapMedia(updateMediaInput.getType().toString(), updateMediaInput.getLocation()); - PostContent postContent = _postService.mapPostContent(contentType.toString(), title, description, link, media); - - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.createPost(type.toString(), postContent, authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create a new post", e); - } - }); + Media media = + updateMediaInput == null + ? null + : _postService.mapMedia( + updateMediaInput.getType().toString(), updateMediaInput.getLocation()); + PostContent postContent = + _postService.mapPostContent(contentType.toString(), title, description, link, media); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return _postService.createPost( + context.getOperationContext(), type.toString(), postContent); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java index d3cd0126fb8527..572b78a3e2cf63 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java @@ -6,6 +6,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -13,14 +14,14 @@ import lombok.RequiredArgsConstructor; import 
lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeletePostResolver implements DataFetcher> { private final PostService _postService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canManageGlobalAnnouncements(context)) { @@ -31,12 +32,15 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final Urn postUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.deletePost(postUrn, authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create a new post", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return _postService.deletePost(context.getOperationContext(), postUrn); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 59f2b458fdc90c..dc7797882371b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -1,31 +1,32 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.ListPostsInput; import com.linkedin.datahub.graphql.generated.ListPostsResult; import com.linkedin.datahub.graphql.types.post.PostMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListPostsResolver implements DataFetcher> { @@ -36,38 +37,62 @@ public class ListPostsResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final ListPostsInput input = bindArgument(environment.getArgument("input"), ListPostsInput.class); + final ListPostsInput input = + bindArgument(environment.getArgument("input"), ListPostsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(LAST_MODIFIED_FIELD_NAME).setOrder(SortOrder.DESCENDING); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final List sortCriteria = + Collections.singletonList( + new SortCriterion() + .setField(LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)); - // First, get all Post Urns. - final SearchResult gmsResult = _entityClient.search(POST_ENTITY_NAME, query, null, sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + // First, get all Post Urns. + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + POST_ENTITY_NAME, + query, + null, + sortCriteria, + start, + count); - // Then, get and hydrate all Posts. - final Map entities = _entityClient.batchGetV2(POST_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, authentication); + // Then, get and hydrate all Posts. 
+ final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + POST_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null); - final ListPostsResult result = new ListPostsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setPosts(entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list posts", e); - } - }); + final ListPostsResult result = new ListPostsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setPosts( + entities.values().stream() + .map(e -> PostMapper.map(context, e)) + .collect(Collectors.toList())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list posts", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolver.java new file mode 100644 index 00000000000000..8f7eee74046841 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolver.java @@ -0,0 +1,74 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.post.PostService; +import com.linkedin.common.Media; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.PostContentType; +import com.linkedin.datahub.graphql.generated.PostType; +import com.linkedin.datahub.graphql.generated.UpdateMediaInput; +import com.linkedin.datahub.graphql.generated.UpdatePostContentInput; +import com.linkedin.datahub.graphql.generated.UpdatePostInput; +import com.linkedin.post.PostContent; +import graphql.GraphQLException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class UpdatePostResolver implements DataFetcher> { + private final PostService postService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { + throw new AuthorizationException( + "Unauthorized to update posts. Please contact your DataHub administrator if this needs corrective action."); + } + + final UpdatePostInput input = + bindArgument(environment.getArgument("input"), UpdatePostInput.class); + final Urn postUrn = Urn.createFromString(input.getUrn()); + + final PostType type = input.getPostType(); + final UpdatePostContentInput content = input.getContent(); + final PostContentType contentType = content.getContentType(); + final String title = content.getTitle(); + final String link = content.getLink(); + final String description = content.getDescription(); + final UpdateMediaInput updateMediaInput = content.getMedia(); + final Authentication authentication = context.getAuthentication(); + + Media media = + updateMediaInput == null + ? 
null + : postService.mapMedia( + updateMediaInput.getType().toString(), updateMediaInput.getLocation()); + PostContent postContent = + postService.mapPostContent(contentType.toString(), title, description, link, media); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return postService.updatePost( + context.getOperationContext(), postUrn, type.toString(), postContent); + } catch (Exception e) { + throw new GraphQLException("Failed to update or edit post", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java index 27de443bc100a0..dde38566490142 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java @@ -1,14 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateQueryInput; -import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.CreateQuerySubjectInput; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.types.query.QueryMapper; import com.linkedin.metadata.service.QueryService; import com.linkedin.query.QueryLanguage; @@ -22,9 +25,6 @@ 
import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateQueryResolver implements DataFetcher> { @@ -32,40 +32,53 @@ public class CreateQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateQueryInput input = bindArgument(environment.getArgument("input"), CreateQueryInput.class); + final CreateQueryInput input = + bindArgument(environment.getArgument("input"), CreateQueryInput.class); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!AuthorizationUtils.canCreateQuery(input.getSubjects() - .stream() - .map(CreateQuerySubjectInput::getDatasetUrn).map(UrnUtils::getUrn) - .collect(Collectors.toList()), context)) { - throw new AuthorizationException( - "Unauthorized to create Query. Please contact your DataHub administrator for more information."); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateQuery( + input.getSubjects().stream() + .map(CreateQuerySubjectInput::getDatasetUrn) + .map(UrnUtils::getUrn) + .collect(Collectors.toList()), + context)) { + throw new AuthorizationException( + "Unauthorized to create Query. 
Please contact your DataHub administrator for more information."); + } - try { - final Urn queryUrn = _queryService.createQuery( - input.getProperties().getName(), - input.getProperties().getDescription(), - QuerySource.MANUAL, - new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())), - input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()), - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new Query from input %s", input), e); - } - }); + try { + final Urn queryUrn = + _queryService.createQuery( + context.getOperationContext(), + input.getProperties().getName(), + input.getProperties().getDescription(), + QuerySource.MANUAL, + new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())), + input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()), + System.currentTimeMillis()); + return QueryMapper.map( + context, + _queryService.getQueryEntityResponse(context.getOperationContext(), queryUrn)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new Query from input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java index 5c5bb288f32bf6..0879b41dd27abd 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java @@ -5,6 +5,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.metadata.service.QueryService; import com.linkedin.query.QuerySubject; @@ -18,7 +19,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteQueryResolver implements DataFetcher> { @@ -26,29 +26,36 @@ public class DeleteQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); - final List subjectUrns = existingSubjects != null - ? existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()) - : Collections.emptyList(); - - if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to delete Query. 
Please contact your DataHub administrator if this needs corrective action."); - } - - try { - _queryService.deleteQuery(queryUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Query", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(context.getOperationContext(), queryUrn); + final List subjectUrns = + existingSubjects != null + ? existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()) + : Collections.emptyList(); + + if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); + } + + try { + _queryService.deleteQuery(context.getOperationContext(), queryUrn); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Query", e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index c7e70cac15bdb4..aa411f019a4c08 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import 
com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -11,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.ListQueriesResult; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -29,10 +33,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListQueriesResolver implements DataFetcher> { @@ -48,38 +48,57 @@ public class ListQueriesResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListQueriesInput input = bindArgument(environment.getArgument("input"), ListQueriesInput.class); + final ListQueriesInput input = + bindArgument(environment.getArgument("input"), ListQueriesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); - - // First, get all Query Urns. 
- final SearchResult gmsResult = _entityClient.search(QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true).setSkipHighlighting(true)); - - final ListQueriesResult result = new ListQueriesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setQueries(mapUnresolvedQueries(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Queries", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final List sortCriteria = + Collections.singletonList( + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)); + + // First, get all Query Urns. + final SearchResult gmsResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags( + flags -> flags.setFulltext(true).setSkipHighlighting(true)), + QUERY_ENTITY_NAME, + query, + buildFilters(input, context.getOperationContext().getAspectRetriever()), + sortCriteria, + start, + count); + + final ListQueriesResult result = new ListQueriesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setQueries( + mapUnresolvedQueries( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Queries", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - // This method maps urns returned from the list endpoint into Partial Query objects which will be resolved be a separate Batch resolver. 
+ // This method maps urns returned from the list endpoint into Partial Query objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedQueries(final List queryUrns) { final List results = new ArrayList<>(); for (final Urn urn : queryUrns) { @@ -92,23 +111,34 @@ private List mapUnresolvedQueries(final List queryUrns) { } @Nullable - private Filter buildFilters(@Nonnull final ListQueriesInput input) { + private Filter buildFilters( + @Nonnull final ListQueriesInput input, @Nullable AspectRetriever aspectRetriever) { final AndFilterInput criteria = new AndFilterInput(); List andConditions = new ArrayList<>(); // Optionally add a source filter. if (input.getSource() != null) { andConditions.add( - new FacetFilterInput(QUERY_SOURCE_FIELD, null, ImmutableList.of(input.getSource().toString()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_SOURCE_FIELD, + null, + ImmutableList.of(input.getSource().toString()), + false, + FilterOperator.EQUAL)); } // Optionally add an entity type filter. 
if (input.getDatasetUrn() != null) { andConditions.add( - new FacetFilterInput(QUERY_ENTITIES_FIELD, null, ImmutableList.of(input.getDatasetUrn()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_ENTITIES_FIELD, + null, + ImmutableList.of(input.getDatasetUrn()), + false, + FilterOperator.EQUAL)); } criteria.setAnd(andConditions); - return buildFilter(Collections.emptyList(), ImmutableList.of(criteria)); + return buildFilter(Collections.emptyList(), ImmutableList.of(criteria), aspectRetriever); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index ef34e91d8fe772..29a02b2b0519bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -26,9 +29,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class UpdateQueryResolver implements DataFetcher> { @@ -36,60 +36,76 @@ public class UpdateQueryResolver 
implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateQueryInput input = bindArgument(environment.getArgument("input"), UpdateQueryInput.class); + final UpdateQueryInput input = + bindArgument(environment.getArgument("input"), UpdateQueryInput.class); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(context.getOperationContext(), queryUrn); - if (existingSubjects == null) { - // No Query Found - throw new DataHubGraphQLException(String.format("Failed to find query with urn %s", queryUrn), DataHubGraphQLErrorCode.NOT_FOUND); - } + if (existingSubjects == null) { + // No Query Found + throw new DataHubGraphQLException( + String.format("Failed to find query with urn %s", queryUrn), + DataHubGraphQLErrorCode.NOT_FOUND); + } - final List subjectUrns = existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()); - final List newSubjectUrns = input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) - .collect(Collectors.toList()) - : Collections.emptyList(); - final List impactedSubjectUrns = new ArrayList<>(); - impactedSubjectUrns.addAll(subjectUrns); - impactedSubjectUrns.addAll(newSubjectUrns); + final List subjectUrns = + existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()); + final List newSubjectUrns = + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) + .collect(Collectors.toList()) + : Collections.emptyList(); + final List impactedSubjectUrns = new ArrayList<>(); + impactedSubjectUrns.addAll(subjectUrns); + impactedSubjectUrns.addAll(newSubjectUrns); - if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); - } + if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); + } - try { - _queryService.updateQuery( - queryUrn, - input.getProperties() != null ? input.getProperties().getName() : null, - input.getProperties() != null ? input.getProperties().getDescription() : null, - input.getProperties() != null && input.getProperties().getStatement() != null - ? new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())) - : null, - input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()) - : null, - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Query from input %s", input), e); - } - }); + try { + _queryService.updateQuery( + context.getOperationContext(), + queryUrn, + input.getProperties() != null ? input.getProperties().getName() : null, + input.getProperties() != null ? input.getProperties().getDescription() : null, + input.getProperties() != null && input.getProperties().getStatement() != null + ? 
new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())) + : null, + input.getSubjects() != null + ? input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()) + : null, + System.currentTimeMillis()); + return QueryMapper.map( + context, + _queryService.getQueryEntityResponse(context.getOperationContext(), queryUrn)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update Query from input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index df1a6d4d4b00dd..01818778643905 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.recommendation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.ContentParams; import com.linkedin.datahub.graphql.generated.EntityProfileParams; import com.linkedin.datahub.graphql.generated.FacetFilter; @@ -13,14 +17,16 @@ import com.linkedin.datahub.graphql.generated.RecommendationRenderType; import com.linkedin.datahub.graphql.generated.RecommendationRequestContext; 
import com.linkedin.datahub.graphql.generated.SearchParams; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.recommendation.EntityRequestContext; import com.linkedin.metadata.recommendation.RecommendationsService; import com.linkedin.metadata.recommendation.SearchRequestContext; +import com.linkedin.metadata.service.ViewService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.util.Collections; @@ -28,68 +34,81 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListRecommendationsResolver implements DataFetcher> { +public class ListRecommendationsResolver + implements DataFetcher> { private static final ListRecommendationsResult EMPTY_RECOMMENDATIONS = new ListRecommendationsResult(Collections.emptyList()); private final RecommendationsService _recommendationsService; + private final ViewService _viewService; @WithSpan @Override public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); final ListRecommendationsInput input = bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Listing recommendations for input {}", input); - List modules = - 
_recommendationsService.listRecommendations(Urn.createFromString(input.getUserUrn()), - mapRequestContext(input.getRequestContext()), input.getLimit()); - return ListRecommendationsResult.builder() - .setModules(modules.stream() - .map(this::mapRecommendationModule) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList())) - .build(); - } catch (Exception e) { - log.error("Failed to get recommendations for input {}", input, e); - return EMPTY_RECOMMENDATIONS; - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug("Listing recommendations for input {}", input); + List modules = + _recommendationsService.listRecommendations( + context.getOperationContext(), + mapRequestContext(context.getOperationContext(), input.getRequestContext()), + viewFilter(context.getOperationContext(), _viewService, input.getViewUrn()), + input.getLimit()); + return ListRecommendationsResult.builder() + .setModules( + modules.stream() + .map(m -> mapRecommendationModule(context, m)) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList())) + .build(); + } catch (Exception e) { + log.error("Failed to get recommendations for input {}", input, e); + return EMPTY_RECOMMENDATIONS; + } + }, + this.getClass().getSimpleName(), + "get"); } private com.linkedin.metadata.recommendation.RecommendationRequestContext mapRequestContext( - RecommendationRequestContext requestContext) { + @Nonnull OperationContext opContext, RecommendationRequestContext requestContext) { com.linkedin.metadata.recommendation.ScenarioType mappedScenarioType; try { mappedScenarioType = - com.linkedin.metadata.recommendation.ScenarioType.valueOf(requestContext.getScenario().toString()); + com.linkedin.metadata.recommendation.ScenarioType.valueOf( + requestContext.getScenario().toString()); } catch (IllegalArgumentException e) { log.error("Failed to map scenario type: {}", requestContext.getScenario(), e); throw e; } 
com.linkedin.metadata.recommendation.RecommendationRequestContext mappedRequestContext = - new com.linkedin.metadata.recommendation.RecommendationRequestContext().setScenario(mappedScenarioType); + new com.linkedin.metadata.recommendation.RecommendationRequestContext() + .setScenario(mappedScenarioType); if (requestContext.getSearchRequestContext() != null) { SearchRequestContext searchRequestContext = new SearchRequestContext().setQuery(requestContext.getSearchRequestContext().getQuery()); if (requestContext.getSearchRequestContext().getFilters() != null) { - searchRequestContext.setFilters(new CriterionArray(requestContext.getSearchRequestContext() - .getFilters() - .stream() - .map(facetField -> criterionFromFilter(facetField)) - .collect(Collectors.toList()))); + searchRequestContext.setFilters( + new CriterionArray( + requestContext.getSearchRequestContext().getFilters().stream() + .map( + facetField -> + criterionFromFilter(facetField, opContext.getAspectRetriever())) + .collect(Collectors.toList()))); } mappedRequestContext.setSearchRequestContext(searchRequestContext); } @@ -98,39 +117,49 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq try { entityUrn = Urn.createFromString(requestContext.getEntityRequestContext().getUrn()); } catch (URISyntaxException e) { - log.error("Malformed URN while mapping recommendations request: {}", - requestContext.getEntityRequestContext().getUrn(), e); + log.error( + "Malformed URN while mapping recommendations request: {}", + requestContext.getEntityRequestContext().getUrn(), + e); throw new IllegalArgumentException(e); } - EntityRequestContext entityRequestContext = new EntityRequestContext().setUrn(entityUrn) - .setType(EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); + EntityRequestContext entityRequestContext = + new EntityRequestContext() + .setUrn(entityUrn) + .setType( + EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); 
mappedRequestContext.setEntityRequestContext(entityRequestContext); } return mappedRequestContext; } private Optional mapRecommendationModule( + @Nullable QueryContext context, com.linkedin.metadata.recommendation.RecommendationModule module) { RecommendationModule mappedModule = new RecommendationModule(); mappedModule.setTitle(module.getTitle()); mappedModule.setModuleId(module.getModuleId()); try { - mappedModule.setRenderType(RecommendationRenderType.valueOf(module.getRenderType().toString())); + mappedModule.setRenderType( + RecommendationRenderType.valueOf(module.getRenderType().toString())); } catch (IllegalArgumentException e) { log.error("Failed to map render type: {}", module.getRenderType(), e); throw e; } mappedModule.setContent( - module.getContent().stream().map(this::mapRecommendationContent).collect(Collectors.toList())); + module.getContent().stream() + .map(c -> mapRecommendationContent(context, c)) + .collect(Collectors.toList())); return Optional.of(mappedModule); } private RecommendationContent mapRecommendationContent( + @Nullable QueryContext context, com.linkedin.metadata.recommendation.RecommendationContent content) { RecommendationContent mappedContent = new RecommendationContent(); mappedContent.setValue(content.getValue()); if (content.hasEntity()) { - mappedContent.setEntity(UrnToEntityMapper.map(content.getEntity())); + mappedContent.setEntity(UrnToEntityMapper.map(context, content.getEntity())); } if (content.hasParams()) { mappedContent.setParams(mapRecommendationParams(content.getParams())); @@ -145,26 +174,31 @@ private RecommendationParams mapRecommendationParams( SearchParams searchParams = new SearchParams(); searchParams.setQuery(params.getSearchParams().getQuery()); if (!params.getSearchParams().getFilters().isEmpty()) { - searchParams.setFilters(params.getSearchParams() - .getFilters() - .stream() - .map(criterion -> FacetFilter.builder().setField(criterion.getField()).setValues( - 
ImmutableList.of(criterion.getValue())).build()) - .collect(Collectors.toList())); + searchParams.setFilters( + params.getSearchParams().getFilters().stream() + .map( + criterion -> + FacetFilter.builder() + .setField(criterion.getField()) + .setValues(ImmutableList.of(criterion.getValue())) + .build()) + .collect(Collectors.toList())); } mappedParams.setSearchParams(searchParams); } if (params.hasEntityProfileParams()) { Urn profileUrn = params.getEntityProfileParams().getUrn(); - mappedParams.setEntityProfileParams(EntityProfileParams.builder() - .setUrn(profileUrn.toString()) - .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) - .build()); + mappedParams.setEntityProfileParams( + EntityProfileParams.builder() + .setUrn(profileUrn.toString()) + .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) + .build()); } if (params.hasContentParams()) { - mappedParams.setContentParams(ContentParams.builder().setCount(params.getContentParams().getCount()).build()); + mappedParams.setContentParams( + ContentParams.builder().setCount(params.getContentParams().getCount()).build()); } return mappedParams; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java index 43d975344ba25e..51b3569c3cc6ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.datahub.authorization.role.RoleService; import com.linkedin.common.urn.Urn; import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AcceptRoleInput; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -13,11 +16,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j - @RequiredArgsConstructor public class AcceptRoleResolver implements DataFetcher> { private final RoleService _roleService; @@ -27,25 +26,36 @@ public class AcceptRoleResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final AcceptRoleInput input = bindArgument(environment.getArgument("input"), AcceptRoleInput.class); + final AcceptRoleInput input = + bindArgument(environment.getArgument("input"), AcceptRoleInput.class); final String inviteTokenStr = input.getInviteToken(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); - if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { - throw new RuntimeException(String.format("Invite token %s is invalid", inviteTokenStr)); - } - - final Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); - _roleService.batchAssignRoleToActors(Collections.singletonList(authentication.getActor().toUrnStr()), roleUrn, - authentication); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to accept role using invite token %s", inviteTokenStr), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); + if (!_inviteTokenService.isInviteTokenValid( + 
context.getOperationContext(), inviteTokenUrn)) { + throw new RuntimeException( + String.format("Invite token %s is invalid", inviteTokenStr)); + } + + final Urn roleUrn = + _inviteTokenService.getInviteTokenRole( + context.getOperationContext(), inviteTokenUrn); + _roleService.batchAssignRoleToActors( + context.getOperationContext(), + Collections.singletonList(authentication.getActor().toUrnStr()), + roleUrn); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to accept role using invite token %s", inviteTokenStr), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java index dc847069afae91..efadc2288df97d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.BatchAssignRoleInput; import graphql.schema.DataFetcher; @@ -13,10 +17,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAssignRoleResolver implements DataFetcher> { @@ -30,19 +30,24 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw "Unauthorized to assign roles. Please contact your DataHub administrator if this needs corrective action."); } - final BatchAssignRoleInput input = bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); + final BatchAssignRoleInput input = + bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); final String roleUrnStr = input.getRoleUrn(); final List actors = input.getActors(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn roleUrn = roleUrnStr == null ? null : Urn.createFromString(roleUrnStr); - _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Urn roleUrn = roleUrnStr == null ? 
null : Urn.createFromString(roleUrnStr); + _roleService.batchAssignRoleToActors(context.getOperationContext(), actors, roleUrn); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 6bdf52e2f89f1b..934a9d66fe2091 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateInviteTokenInput; import com.linkedin.datahub.graphql.generated.InviteToken; @@ -12,33 +16,37 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateInviteTokenResolver implements DataFetcher> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception 
{ + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to create invite tokens. Please contact your DataHub administrator if this needs corrective action."); } - final CreateInviteTokenInput input = bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); + final CreateInviteTokenInput input = + bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create invite token for role %s", roleUrnStr), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken( + context.getOperationContext(), roleUrnStr, true)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create invite token for role %s", roleUrnStr), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 0b0cbbb7ba4732..20477e75699163 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static 
com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.GetInviteTokenInput; import com.linkedin.datahub.graphql.generated.InviteToken; @@ -12,33 +16,37 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class GetInviteTokenResolver implements DataFetcher> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to get invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final GetInviteTokenInput input = bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); + final GetInviteTokenInput input = + bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to get invite token for role %s", roleUrnStr), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken( + context.getOperationContext(), roleUrnStr, false)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to get invite token for role %s", roleUrnStr), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index 4746370d8603b8..813753289a7580 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -1,14 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.DataHubRole; import 
com.linkedin.datahub.graphql.generated.ListRolesInput; import com.linkedin.datahub.graphql.generated.ListRolesResult; import com.linkedin.datahub.graphql.types.role.mappers.DataHubRoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -21,13 +24,10 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListRolesResolver implements DataFetcher> { @@ -38,41 +38,58 @@ public class ListRolesResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListRolesInput input = bindArgument(environment.getArgument("input"), ListRolesInput.class); + final ListRolesInput input = + bindArgument(environment.getArgument("input"), ListRolesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all role Urns. 
- final SearchResult gmsResult = - _entityClient.search(DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, get all role Urns. + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + DATAHUB_ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count); - // Then, get and hydrate all users. - final Map entities = _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, context.getAuthentication()); + // Then, get and hydrate all users. + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null); - final ListRolesResult result = new ListRolesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list roles", e); - } - }); + final ListRolesResult result = new ListRolesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setRoles(mapEntitiesToRoles(context, entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list roles", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private List mapEntitiesToRoles(final Collection entities) { + private static List mapEntitiesToRoles( + @Nullable QueryContext context, final Collection entities) { return 
entities.stream() - .map(DataHubRoleMapper::map) + .map(e -> DataHubRoleMapper.map(context, e)) .sorted(Comparator.comparing(DataHubRole::getName)) .collect(Collectors.toList()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index e9140441999e26..19bccaf2650866 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -1,7 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AggregateAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.AggregateResults; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; @@ -10,32 +16,32 @@ import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - +import 
java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** - * Executes a search query only to get a provided list of aggregations back. - * Does not resolve any entities as results. + * Executes a search query only to get a provided list of aggregations back. Does not resolve any + * entities as results. */ @Slf4j @RequiredArgsConstructor -public class AggregateAcrossEntitiesResolver implements DataFetcher> { +public class AggregateAcrossEntitiesResolver + implements DataFetcher> { private final EntityClient _entityClient; private final ViewService _viewService; + private final FormService _formService; @Override public CompletableFuture get(DataFetchingEnvironment environment) { @@ -48,48 +54,81 @@ public CompletableFuture get(DataFetchingEnvironment environme // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - - final List facets = input.getFacets() != null && input.getFacets().size() > 0 ? 
input.getFacets() : null; - - try { - return mapAggregateResults(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - 0, - 0, // 0 entity count because we don't want resolved entities - searchFlags, - null, - ResolverUtils.getAuthentication(environment), - facets)); - } catch (Exception e) { - log.error( - "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", - input.getTypes(), input.getQuery(), input.getOrFilters()); - throw new RuntimeException( - "Failed to execute aggregate across entities: " + String.format("entity types %s, query %s, filters: %s", - input.getTypes(), input.getQuery(), input.getOrFilters()), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + context.getOperationContext(), + _viewService, + UrnUtils.getUrn(input.getViewUrn())) + : null; + + final Filter inputFilter = + ResolverUtils.buildFilter( + null, input.getOrFilters(), context.getOperationContext().getAspectRetriever()); + + final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); + + final List facets = + input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; + + List finalEntities = + maybeResolvedView != null + ? 
SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames; + if (finalEntities.size() == 0) { + return createEmptyAggregateResults(); + } + + try { + return mapAggregateResults( + context, + _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), + finalEntities, + sanitizedQuery, + maybeResolvedView != null + ? SearchUtils.combineFilters( + inputFilter, maybeResolvedView.getDefinition().getFilter()) + : inputFilter, + 0, + 0, // 0 entity count because we don't want resolved entities + Collections.emptyList(), + facets)); + } catch (Exception e) { + log.error( + "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters()); + throw new RuntimeException( + "Failed to execute aggregate across entities: " + + String.format( + "entity types %s, query %s, filters: %s", + input.getTypes(), input.getQuery(), input.getOrFilters()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } - AggregateResults mapAggregateResults(SearchResult searchResult) { + static AggregateResults mapAggregateResults( + @Nullable QueryContext context, SearchResult searchResult) { final AggregateResults results = new AggregateResults(); - results.setFacets(searchResult.getMetadata().getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + results.setFacets( + searchResult.getMetadata().getAggregations().stream() + .map(f -> MapperUtils.mapFacet(context, f)) + .collect(Collectors.toList())); return results; } + + AggregateResults createEmptyAggregateResults() { + final AggregateResults result = new AggregateResults(); + result.setFacets(new ArrayList<>()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 043ecf5eb97f18..e0855aaad48ca5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -1,99 +1,119 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleInput; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleResults; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; -import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static 
com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.apache.commons.lang3.StringUtils.isBlank; +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ +public class AutoCompleteForMultipleResolver + implements DataFetcher> { -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ -public class AutoCompleteForMultipleResolver implements DataFetcher> { + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); + private final Map> _typeToEntity; + private final ViewService _viewService; - private final Map> _typeToEntity; - private final ViewService _viewService; - - public AutoCompleteForMultipleResolver(@Nonnull final List> searchableEntities, @Nonnull final ViewService viewService) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - _viewService = viewService; - } + public AutoCompleteForMultipleResolver( + @Nonnull final List> searchableEntities, + @Nonnull final ViewService viewService) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + _viewService = viewService; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final AutoCompleteMultipleInput input = bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final AutoCompleteMultipleInput input = + bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); - if (isBlank(input.getQuery())) { - _logger.error("'query' 
parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) + if (isBlank(input.getQuery())) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + context.getOperationContext(), _viewService, UrnUtils.getUrn(input.getViewUrn())) : null; - List types = getEntityTypes(input.getTypes(), maybeResolvedView); - if (types != null && types.size() > 0) { - return AutocompleteUtils.batchGetAutocompleteResults( - types.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); - } - - // By default, autocomplete only against the Default Set of Autocomplete entities - return AutocompleteUtils.batchGetAutocompleteResults( - AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); + List types = getEntityTypes(input.getTypes(), maybeResolvedView); + types = + types != null + ? 
types.stream() + .filter(AUTO_COMPLETE_ENTITY_TYPES::contains) + .collect(Collectors.toList()) + : null; + if (types != null && types.size() > 0) { + return AutocompleteUtils.batchGetAutocompleteResults( + types.stream() + .map(_typeToEntity::get) + .filter(Objects::nonNull) + .collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); } - /** - * Gets the intersection of provided input types and types on the view applied (if any) - */ - @Nullable - List getEntityTypes(final @Nullable List inputTypes, final @Nullable DataHubViewInfo maybeResolvedView) { - List types = inputTypes; - if (maybeResolvedView != null) { - List inputEntityTypes = types != null ? types : new ArrayList<>(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - List stringEntityTypes = SearchUtils.intersectEntityTypes(inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); + // By default, autocomplete only against the Default Set of Autocomplete entities + return AutocompleteUtils.batchGetAutocompleteResults( + AUTO_COMPLETE_ENTITY_TYPES.stream() + .map(_typeToEntity::get) + .filter(Objects::nonNull) + .collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); + } - types = stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); - } + /** Gets the intersection of provided input types and types on the view applied (if any) */ + @Nullable + List getEntityTypes( + final @Nullable List inputTypes, + final @Nullable DataHubViewInfo maybeResolvedView) { + List types = inputTypes; + if (maybeResolvedView != null) { + List inputEntityTypes = types != null ? 
types : new ArrayList<>(); + final List inputEntityNames = + inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List stringEntityTypes = + SearchUtils.intersectEntityTypes( + inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); - return types; + types = + stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); } + + return types; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java index e13545aadc5167..79792940ef27f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java @@ -1,90 +1,103 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.linkedin.datahub.graphql.types.SearchableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static org.apache.commons.lang3.StringUtils.isBlank; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AutoCompleteInput; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import 
java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ public class AutoCompleteResolver implements DataFetcher> { - private static final int DEFAULT_LIMIT = 5; + private static final int DEFAULT_LIMIT = 5; - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); - private final Map> _typeToEntity; + private final Map> _typeToEntity; - public AutoCompleteResolver(@Nonnull final List> searchableEntities) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - } + public AutoCompleteResolver(@Nonnull final List> searchableEntities) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final AutoCompleteInput input = bindArgument(environment.getArgument("input"), AutoCompleteInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final AutoCompleteInput input = + bindArgument(environment.getArgument("input"), AutoCompleteInput.class); - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - if (isBlank(sanitizedQuery)) { - _logger.error("'query' parameter was null or empty"); - throw new 
ValidationException("'query' parameter can not be null or empty"); - } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + if (isBlank(sanitizedQuery)) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug("Executing autocomplete. " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + final Filter filter = + ResolverUtils.buildFilter( + input.getFilters(), + input.getOrFilters(), + context.getOperationContext().getRetrieverContext().orElseThrow().getAspectRetriever()); + final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + _logger.debug( + "Executing autocomplete. 
" + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), input.getLimit())); - return _typeToEntity.get(input.getType()).autoComplete( - sanitizedQuery, - input.getField(), - filter, - limit, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + return _typeToEntity + .get(input.getType()) + .autoComplete( + sanitizedQuery, input.getField(), filter, limit, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), - input.getLimit()) + " " - + e.getMessage()); - throw new RuntimeException("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", - input.getType(), - input.getField(), - input.getQuery(), - input.getFilters(), - input.getLimit()), e); - } - }); - } + input.getLimit()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", + input.getType(), + input.getField(), + input.getQuery(), + input.getFilters(), + input.getLimit()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java index 40722211de8d3a..5b5888b89b241b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleInput; import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleResults; import com.linkedin.datahub.graphql.generated.AutoCompleteResultForEntity; @@ -14,69 +16,87 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; - - public class AutocompleteUtils { private static final Logger _logger = LoggerFactory.getLogger(AutocompleteUtils.class.getName()); private static final int DEFAULT_LIMIT = 5; - private AutocompleteUtils() { } + private AutocompleteUtils() {} public static CompletableFuture batchGetAutocompleteResults( List> entities, String sanitizedQuery, AutoCompleteMultipleInput input, DataFetchingEnvironment environment, - @Nullable DataHubViewInfo view - ) { + @Nullable DataHubViewInfo view) { final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; + final QueryContext context = environment.getContext(); - final List> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> { - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = view != null - ? 
SearchUtils.combineFilters(filter, view.getDefinition().getFilter()) - : filter; + final List> autoCompletesFuture = + entities.stream() + .map( + entity -> + GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Filter filter = + ResolverUtils.buildFilter( + input.getFilters(), + input.getOrFilters(), + context.getOperationContext().getAspectRetriever()); + final Filter finalFilter = + view != null + ? SearchUtils.combineFilters( + filter, view.getDefinition().getFilter()) + : filter; - try { - final AutoCompleteResults searchResult = entity.autoComplete( - sanitizedQuery, - input.getField(), - finalFilter, - limit, - environment.getContext() - ); - return new AutoCompleteResultForEntity( - entity.type(), - searchResult.getSuggestions(), - searchResult.getEntities() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete all: " - + String.format("field %s, query %s, filters: %s, limit: %s", - input.getField(), - input.getQuery(), - filter, - input.getLimit()), e); - return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList()); - } - })).collect(Collectors.toList()); + try { + final AutoCompleteResults searchResult = + entity.autoComplete( + sanitizedQuery, + input.getField(), + finalFilter, + limit, + environment.getContext()); + return new AutoCompleteResultForEntity( + entity.type(), + searchResult.getSuggestions(), + searchResult.getEntities()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete all: " + + String.format( + "field %s, query %s, filters: %s, limit: %s", + input.getField(), + input.getQuery(), + filter, + input.getLimit()), + e); + return new AutoCompleteResultForEntity( + entity.type(), Collections.emptyList(), Collections.emptyList()); + } + }, + AutocompleteUtils.class.getSimpleName(), + "batchGetAutocompleteResults")) + .collect(Collectors.toList()); return CompletableFuture.allOf(autoCompletesFuture.toArray(new CompletableFuture[0])) - 
.thenApplyAsync((res) -> { - AutoCompleteMultipleResults result = new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); - List suggestions = autoCompletesFuture.stream() - .map(CompletableFuture::join) - .filter( + .thenApplyAsync( + (res) -> { + AutoCompleteMultipleResults result = + new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); + List suggestions = + autoCompletesFuture.stream() + .map(CompletableFuture::join) + .filter( autoCompleteResultForEntity -> - autoCompleteResultForEntity.getSuggestions() != null && autoCompleteResultForEntity.getSuggestions().size() > 0 - ) - .collect(Collectors.toList()); - result.setSuggestions(suggestions); - return result; - }); + autoCompleteResultForEntity.getSuggestions() != null + && autoCompleteResultForEntity.getSuggestions().size() > 0) + .collect(Collectors.toList()); + result.setSuggestions(suggestions); + return result; + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 17058fd8d7cffb..b07e3fa9126412 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -1,14 +1,19 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; -import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.search.AggregationMetadata; import com.linkedin.metadata.search.AggregationMetadataArray; @@ -18,26 +23,23 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; +import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor -public class GetQuickFiltersResolver implements DataFetcher> { +public class GetQuickFiltersResolver + implements DataFetcher> { private final EntityClient _entityClient; private final ViewService 
_viewService; @@ -47,41 +49,58 @@ public class GetQuickFiltersResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { - final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); - - return CompletableFuture.supplyAsync(() -> { - final GetQuickFiltersResult result = new GetQuickFiltersResult(); - final List quickFilters = new ArrayList<>(); - - try { - final SearchResult searchResult = getSearchResults(ResolverUtils.getAuthentication(environment), input); - final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - - quickFilters.addAll(getPlatformQuickFilters(aggregations)); - quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); - } catch (Exception e) { - log.error("Failed getting quick filters", e); - throw new RuntimeException("Failed to to get quick filters", e); - } - - result.setQuickFilters(quickFilters); - return result; - }); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final GetQuickFiltersInput input = + bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final GetQuickFiltersResult result = new GetQuickFiltersResult(); + final List quickFilters = new ArrayList<>(); + + try { + final SearchResult searchResult = + getSearchResults(context.getOperationContext(), input); + final AggregationMetadataArray aggregations = + searchResult.getMetadata().getAggregations(); + + quickFilters.addAll(getPlatformQuickFilters(context, aggregations)); + quickFilters.addAll(getEntityTypeQuickFilters(context, aggregations)); + } catch (Exception e) { + log.error("Failed getting quick filters", e); + throw new RuntimeException("Failed to to get quick filters", e); + } + + result.setQuickFilters(quickFilters); + return result; + }, + 
this.getClass().getSimpleName(), + "get"); } /** - * Do a star search with view filter applied to get info about all data in this instance. + * Do a star search with view filter applied to get info about all data in this instance. Include + * aggregations. */ - private SearchResult getSearchResults(@Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) throws Exception { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) - : null; - final List entityNames = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + private SearchResult getSearchResults( + @Nonnull final OperationContext opContext, @Nonnull final GetQuickFiltersInput input) + throws Exception { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView(opContext, _viewService, UrnUtils.getUrn(input.getViewUrn())) + : null; + final List entityNames = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); return _entityClient.searchAcrossEntities( + opContext.withSearchFlags(flags -> flags.setSkipAggregates(false)), maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + ? 
SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) : entityNames, "*", maybeResolvedView != null @@ -89,87 +108,110 @@ private SearchResult getSearchResults(@Nonnull final Authentication authenticati : null, 0, 0, - null, - null, - authentication); + Collections.emptyList(), + null); } /** - * Get platforms and their count from an aggregations array, sorts by entity count, and map the top 5 to quick filters + * Get platforms and their count from an aggregations array, sorts by entity count, and map the + * top 5 to quick filters */ - private List getPlatformQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List getPlatformQuickFilters( + @Nullable QueryContext context, @Nonnull final AggregationMetadataArray aggregations) { final List platforms = new ArrayList<>(); - final Optional platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); + final Optional platformAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); if (platformAggregations.isPresent()) { final List sortedPlatforms = - platformAggregations.get().getFilterValues().stream().sorted(Comparator.comparingLong(val -> -val.getFacetCount())).collect(Collectors.toList()); - sortedPlatforms.forEach(platformFilter -> { - if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { - platforms.add(mapQuickFilter(PLATFORM, platformFilter)); - } - }); + platformAggregations.get().getFilterValues().stream() + .sorted(Comparator.comparingLong(val -> -val.getFacetCount())) + .collect(Collectors.toList()); + sortedPlatforms.forEach( + platformFilter -> { + if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { + platforms.add(mapQuickFilter(context, PLATFORM, platformFilter)); + } + }); } // return platforms sorted alphabetically by their name - return 
platforms.stream().sorted(Comparator.comparing(QuickFilter::getValue)).collect(Collectors.toList()); + return platforms.stream() + .sorted(Comparator.comparing(QuickFilter::getValue)) + .collect(Collectors.toList()); } /** - * Gets entity type quick filters from search aggregations. First, get source entity type quick filters - * from a prioritized list. Do the same for datathub entity types. + * Gets entity type quick filters from search aggregations. First, get source entity type quick + * filters from a prioritized list. Do the same for datathub entity types. */ - private List getEntityTypeQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List getEntityTypeQuickFilters( + @Nullable QueryContext context, @Nonnull final AggregationMetadataArray aggregations) { final List entityTypes = new ArrayList<>(); - final Optional entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); + final Optional entityAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); if (entityAggregations.isPresent()) { final List sourceEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + context, + SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, + SOURCE_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(sourceEntityTypeFilters); final List dataHubEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + context, + SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, + DATAHUB_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(dataHubEntityTypeFilters); } return entityTypes; } /** - * Create a quick filters list by looping over prioritized list and adding filters that exist until we reach the maxListSize defined + * Create a 
quick filters list by looping over prioritized list and adding filters that exist + * until we reach the maxListSize defined */ private List getQuickFiltersFromList( + @Nullable QueryContext context, @Nonnull final List prioritizedList, final int maxListSize, - @Nonnull final AggregationMetadata entityAggregations - ) { + @Nonnull final AggregationMetadata entityAggregations) { final List entityTypes = new ArrayList<>(); - prioritizedList.forEach(entityType -> { - if (entityTypes.size() < maxListSize) { - final Optional entityFilter = entityAggregations.getFilterValues().stream().filter(val -> val.getValue().equals(entityType)).findFirst(); - if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); - } - } - }); + prioritizedList.forEach( + entityType -> { + if (entityTypes.size() < maxListSize) { + final Optional entityFilter = + entityAggregations.getFilterValues().stream() + .filter(val -> val.getValue().equals(entityType)) + .findFirst(); + if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { + entityTypes.add(mapQuickFilter(context, ENTITY_FILTER_NAME, entityFilter.get())); + } + } + }); return entityTypes; } - private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final FilterValue filterValue) { + private QuickFilter mapQuickFilter( + @Nullable QueryContext context, + @Nonnull final String field, + @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); quickFilter.setValue(convertFilterValue(filterValue.getValue(), isEntityTypeFilter)); if (filterValue.getEntity() != null) { - final Entity entity = UrnToEntityMapper.map(filterValue.getEntity()); + final Entity entity = UrnToEntityMapper.map(context, filterValue.getEntity()); quickFilter.setEntity(entity); } return quickFilter; } - /** - * If 
we're working with an entity type filter, we need to convert the value to an EntityType - */ + /** If we're working with an entity type filter, we need to convert the value to an EntityType */ public static String convertFilterValue(String filterValue, boolean isEntityType) { if (isEntityType) { return EntityTypeMapper.getType(filterValue).toString(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index d576ffc8ca2807..8b8b93353bc6e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -1,13 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ScrollAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.ScrollResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnScrollResultsMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.SearchFlags; @@ -24,13 +28,7 @@ import lombok.extern.slf4j.Slf4j; import 
org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossEntitiesResolver implements DataFetcher> { @@ -48,57 +46,88 @@ public CompletableFuture get(DataFetchingEnvironment environment) bindArgument(environment.getArgument("input"), ScrollAcrossEntitiesInput.class); final List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - final List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + final List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - // escape forward slash since it is a reserved character in Elasticsearch, default to * if blank/empty - final String sanitizedQuery = StringUtils.isNotBlank(input.getQuery()) - ? ResolverUtils.escapeForwardSlash(input.getQuery()) : "*"; + // escape forward slash since it is a reserved character in Elasticsearch, default to * if + // blank/empty + final String sanitizedQuery = + StringUtils.isNotBlank(input.getQuery()) + ? ResolverUtils.escapeForwardSlash(input.getQuery()) + : "*"; - @Nullable - final String scrollId = input.getScrollId(); + @Nullable final String scrollId = input.getScrollId(); final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? 
resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + context.getOperationContext(), + _viewService, + UrnUtils.getUrn(input.getViewUrn())) + : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } + final Filter baseFilter = + ResolverUtils.buildFilter( + null, input.getOrFilters(), context.getOperationContext().getAspectRetriever()); + final SearchFlags searchFlags; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); + } else { + searchFlags = null; + } - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; - return UrnScrollResultsMapper.map(_entityClient.scrollAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - scrollId, - keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), e); - } - }); + return UrnScrollResultsMapper.map( + context, + _entityClient.scrollAcrossEntities( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags), + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + scrollId, + keepAlive, + count)); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index 78be1ac3096908..1b719b6f786205 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -1,17 +1,23 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageInput; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; -import 
com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.common.mappers.LineageFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnScrollAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.LineageFlags; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; @@ -25,13 +31,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossLineageResolver @@ -53,55 +53,116 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? 
ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final String scrollId = input.getScrollId() != null ? input.getScrollId() : null; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); - final List facetFilters = filters.stream() - .map(AndFilterInput::getAnd) - .flatMap(List::stream) - .collect(Collectors.toList()); + final List filters = + input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); final Integer maxHops = getMaxHops(facetFilters); String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable - final Long endTimeMillis = input.getEndTimeMillis() == null ? 
null : input.getEndTimeMillis(); + Long endTimeMillis = + ResolverUtils.getLineageEndTimeMillis(input.getStartTimeMillis(), input.getEndTimeMillis()); + + final LineageFlags lineageFlags = LineageFlagsInputMapper.map(context, input.getLineageFlags()); + if (lineageFlags.getStartTimeMillis() == null && startTimeMillis != null) { + lineageFlags.setStartTimeMillis(startTimeMillis); + } + + if (lineageFlags.getEndTimeMillis() == null && endTimeMillis != null) { + lineageFlags.setEndTimeMillis(endTimeMillis); + } + ; com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); - - SearchFlags searchFlags = null; - final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = new SearchFlags() - .setSkipCache(inputFlags.getSkipCache()) - .setFulltext(inputFlags.getFulltext()) - .setMaxAggValues(inputFlags.getMaxAggValues()); - } - return UrnScrollAcrossLineageResultsMapper.map( - _entityClient.scrollAcrossLineage(urn, resolvedDirection, entityNames, sanitizedQuery, - maxHops, ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), null, scrollId, - keepAlive, count, startTimeMillis, endTimeMillis, searchFlags, ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); - throw new RuntimeException("Failed to execute scroll across relationships: " + 
String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); + + final SearchFlags searchFlags; + final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = + input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = + new SearchFlags() + .setSkipCache(inputFlags.getSkipCache()) + .setFulltext(inputFlags.getFulltext()) + .setMaxAggValues(inputFlags.getMaxAggValues()); + } else { + searchFlags = null; + } + return UrnScrollAcrossLineageResultsMapper.map( + context, + _entityClient.scrollAcrossLineage( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags) + .withLineageFlags(flags -> lineageFlags != null ? 
lineageFlags : flags), + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + ResolverUtils.buildFilter( + facetFilters, + input.getOrFilters(), + context.getOperationContext().getAspectRetriever()), + null, + scrollId, + keepAlive, + count)); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); + throw new RuntimeException( + "Failed to execute scroll across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 1022b25b3cd992..0dbed92b7d58e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; import com.linkedin.datahub.graphql.generated.SearchResults; import 
com.linkedin.datahub.graphql.resolvers.ResolverUtils; @@ -14,18 +18,14 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossEntitiesResolver implements DataFetcher> { @@ -50,43 +50,90 @@ public CompletableFuture get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - SortCriterion sortCriterion = input.getSortInput() != null ? mapSortCriterion(input.getSortInput().getSortCriterion()) : null; - - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? 
SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - start, - count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + context.getOperationContext(), + _viewService, + UrnUtils.getUrn(input.getViewUrn())) + : null; + + final Filter baseFilter = + ResolverUtils.buildFilter( + input.getFilters(), + input.getOrFilters(), + context.getOperationContext().getAspectRetriever()); + + SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); + List sortCriteria; + if (input.getSortInput() != null) { + if (input.getSortInput().getSortCriteria() != null) { + sortCriteria = + input.getSortInput().getSortCriteria().stream() + .map(SearchUtils::mapSortCriterion) + .collect(Collectors.toList()); + } else { + sortCriteria = + input.getSortInput().getSortCriterion() != null + ? 
Collections.singletonList( + mapSortCriterion(input.getSortInput().getSortCriterion())) + : Collections.emptyList(); + } + + } else { + sortCriteria = Collections.emptyList(); + } + + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + List finalEntities = + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames; + if (finalEntities.size() == 0) { + return SearchUtils.createEmptySearchResults(start, count); + } + + return UrnSearchResultsMapper.map( + context, + _entityClient.searchAcrossEntities( + context.getOperationContext().withSearchFlags(flags -> searchFlags), + finalEntities, + sanitizedQuery, + maybeResolvedView != null + ? SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + start, + count, + sortCriteria)); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 9f489183f4af72..dc3a1fc17e4ec9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -1,51 +1,89 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static com.linkedin.metadata.Constants.QUERY_ENTITY_NAME; + +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LineageDirection; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageInput; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.common.mappers.LineageFlagsInputMapper; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchAcrossLineageResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.LineageFlags; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.r2.RemoteInvocationException; +import graphql.VisibleForTesting; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import 
java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nullable; -import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j -@RequiredArgsConstructor public class SearchAcrossLineageResolver implements DataFetcher> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; + private static final Set TRANSIENT_ENTITIES = ImmutableSet.of(QUERY_ENTITY_NAME); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + + @VisibleForTesting final Set _allEntities; + private final List _allowedEntities; + + public SearchAcrossLineageResolver(EntityClient entityClient, EntityRegistry entityRegistry) { + this._entityClient = entityClient; + this._entityRegistry = entityRegistry; + this._allEntities = + entityRegistry.getEntitySpecs().values().stream() + .map(EntitySpec::getName) + .collect(Collectors.toSet()); + + this._allowedEntities = + this._allEntities.stream() + .filter(e -> !TRANSIENT_ENTITIES.contains(e)) + .collect(Collectors.toList()); + } + + private List getEntityNamesFromInput(List inputTypes) { + if (inputTypes != null && !inputTypes.isEmpty()) { + return inputTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + } else { + return this._allowedEntities; + } + } + @Override public CompletableFuture get(DataFetchingEnvironment environment) throws URISyntaxException { log.debug("Entering search across lineage graphql resolver"); + final QueryContext context = 
environment.getContext(); + final SearchAcrossLineageInput input = bindArgument(environment.getArgument("input"), SearchAcrossLineageInput.class); @@ -53,77 +91,112 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); - List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List entityNames = getEntityNamesFromInput(input.getTypes()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = input.getFilters() != null ? input.getFilters() : new ArrayList<>(); - final Integer maxHops = getMaxHops(filters); + final List filters = + input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); + final Integer maxHops = getMaxHops(facetFilters); @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable - final Long endTimeMillis = input.getEndTimeMillis() == null ? 
null : input.getEndTimeMillis(); + Long endTimeMillis = + ResolverUtils.getLineageEndTimeMillis(input.getStartTimeMillis(), input.getEndTimeMillis()); + + final LineageFlags lineageFlags = LineageFlagsInputMapper.map(context, input.getLineageFlags()); + if (lineageFlags.getStartTimeMillis() == null && startTimeMillis != null) { + lineageFlags.setStartTimeMillis(startTimeMillis); + } + + if (lineageFlags.getEndTimeMillis() == null && endTimeMillis != null) { + lineageFlags.setEndTimeMillis(endTimeMillis); + } com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, - resolvedDirection, - input.getTypes(), - input.getQuery(), - filters, - start, - count); - - final Filter filter = - ResolverUtils.buildFilter( + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), filters, - input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - if (inputFlags.getSkipHighlighting() == null) { - searchFlags.setSkipHighlighting(true); - } - } else { - searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); - } - - return UrnSearchAcrossLineageResultsMapper.map( - _entityClient.searchAcrossLineage( + start, + count); + + final Filter filter = + ResolverUtils.buildFilter( + input.getFilters(), + input.getOrFilters(), + 
context.getOperationContext().getAspectRetriever()); + final SearchFlags searchFlags; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); + if (inputFlags.getSkipHighlighting() == null) { + searchFlags.setSkipHighlighting(true); + } + } else { + searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); + } + LineageSearchResult salResults = + _entityClient.searchAcrossLineage( + context + .getOperationContext() + .withSearchFlags(flags -> searchFlags) + .withLineageFlags(flags -> lineageFlags), + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + filter, + null, + start, + count); + + return UrnSearchAcrossLineageResultsMapper.map(context, salResults); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", urn, resolvedDirection, - entityNames, - sanitizedQuery, - maxHops, - filter, - null, + input.getTypes(), + input.getQuery(), + filters, start, - count, - startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count); - throw new RuntimeException("Failed to execute search across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count), e); - } finally { - log.debug("Returning from search across lineage resolver"); - } - }); + count); + throw new RuntimeException( + "Failed to 
execute search across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + start, + count), + e); + } finally { + log.debug("Returning from search across lineage resolver"); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 0e66d6e601399c..7a48e305dbfe49 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,36 +1,48 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.GroupingCriterion; +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; import 
com.linkedin.metadata.query.SearchFlags; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import io.opentelemetry.extension.annotations.WithSpan; +import java.util.Collections; import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - -/** - * Resolver responsible for resolving the 'search' field of the Query type - */ +/** Resolver responsible for resolving the 'search' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchResolver implements DataFetcher> { - private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = new SearchFlags() + private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = + new SearchFlags() .setFulltext(true) .setMaxAggValues(20) .setSkipCache(false) .setSkipAggregates(false) - .setSkipHighlighting(false); + .setSkipHighlighting(false) + .setGroupingSpec( + new GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + new GroupingCriterion() + .setBaseEntityType(SCHEMA_FIELD_ENTITY_NAME) + .setGroupingEntityType(DATASET_ENTITY_NAME)))); private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; @@ -39,6 +51,7 @@ public class SearchResolver implements DataFetcher get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); final SearchInput input = bindArgument(environment.getArgument("input"), SearchInput.class); final String entityName = EntityTypeMapper.getName(input.getType()); // escape forward slash since it is a reserved character in Elasticsearch @@ -49,27 +62,62 @@ public CompletableFuture get(DataFetchingEnvironment environment) final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != 
null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } else { searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug( + "Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); - return UrnSearchResultsMapper.map( - _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), - input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment), - searchFlags)); - } catch (Exception e) { - log.error("Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags), e); - } - }); + return UrnSearchResultsMapper.map( + context, + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> searchFlags), + entityName, + sanitizedQuery, + ResolverUtils.buildFilter( + input.getFilters(), + input.getOrFilters(), + context.getOperationContext().getAspectRetriever()), + Collections.emptyList(), + start, + count)); 
+ } catch (Exception e) { + log.error( + "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index fb146ef72877d1..04777c3fcdb4e2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -1,12 +1,29 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; 
+import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -17,6 +34,7 @@ import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewInfo; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -28,31 +46,11 @@ import lombok.extern.slf4j.Slf4j; import org.codehaus.plexus.util.CollectionUtils; -import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; -import static 
com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; - - @Slf4j public class SearchUtils { - private SearchUtils() { - } + private SearchUtils() {} - /** - * Entities that are searched by default in Search Across Entities - */ + /** Entities that are searched by default in Search Across Entities */ public static final List SEARCHABLE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -74,12 +72,11 @@ private SearchUtils() { EntityType.CONTAINER, EntityType.DOMAIN, EntityType.DATA_PRODUCT, - EntityType.NOTEBOOK); - + EntityType.NOTEBOOK, + EntityType.BUSINESS_ATTRIBUTE, + EntityType.SCHEMA_FIELD); - /** - * Entities that are part of autocomplete by default in Auto Complete Across Entities - */ + /** Entities that are part of autocomplete by default in Auto Complete Across Entities */ public static final List AUTO_COMPLETE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -95,67 +92,83 @@ private SearchUtils() { EntityType.TAG, EntityType.CORP_USER, EntityType.CORP_GROUP, - EntityType.ROLE, EntityType.NOTEBOOK, - EntityType.DATA_PRODUCT); + EntityType.DATA_PRODUCT, + EntityType.DOMAIN, + EntityType.BUSINESS_ATTRIBUTE); - /** - * A prioritized list of source filter types used to generate quick filters - */ - public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of( - DATASET_ENTITY_NAME, - DASHBOARD_ENTITY_NAME, - DATA_FLOW_ENTITY_NAME, - DATA_JOB_ENTITY_NAME, - CHART_ENTITY_NAME, - CONTAINER_ENTITY_NAME, - ML_MODEL_ENTITY_NAME, - ML_MODEL_GROUP_ENTITY_NAME, - ML_FEATURE_ENTITY_NAME, - 
ML_FEATURE_TABLE_ENTITY_NAME, - ML_PRIMARY_KEY_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** Entities that are part of browse by default */ + public static final List BROWSE_ENTITY_TYPES = + ImmutableList.of( + EntityType.DATASET, + EntityType.DASHBOARD, + EntityType.CHART, + EntityType.CONTAINER, + EntityType.MLMODEL, + EntityType.MLMODEL_GROUP, + EntityType.MLFEATURE_TABLE, + EntityType.DATA_FLOW, + EntityType.DATA_JOB, + EntityType.NOTEBOOK); - /** - * A prioritized list of DataHub filter types used to generate quick filters - */ - public static final List PRIORITIZED_DATAHUB_ENTITY_TYPES = Stream.of( - DOMAIN_ENTITY_NAME, - GLOSSARY_TERM_ENTITY_NAME, - CORP_GROUP_ENTITY_NAME, - CORP_USER_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** A prioritized list of source filter types used to generate quick filters */ + public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = + Stream.of( + DATASET_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME, + CHART_ENTITY_NAME, + CONTAINER_ENTITY_NAME, + ML_MODEL_ENTITY_NAME, + ML_MODEL_GROUP_ENTITY_NAME, + ML_FEATURE_ENTITY_NAME, + ML_FEATURE_TABLE_ENTITY_NAME, + ML_PRIMARY_KEY_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); + + /** A prioritized list of DataHub filter types used to generate quick filters */ + public static final List PRIORITIZED_DATAHUB_ENTITY_TYPES = + Stream.of( + DOMAIN_ENTITY_NAME, + GLOSSARY_TERM_ENTITY_NAME, + CORP_GROUP_ENTITY_NAME, + CORP_USER_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); /** - * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link Filter} - * in disjunctive normal form. + * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link + * Filter} in disjunctive normal form. 
* * @param baseFilter the filter to apply the view to * @param viewFilter the view filter, null if it doesn't exist - * * @return a new instance of {@link Filter} representing the applied view. */ @Nonnull - public static Filter combineFilters(@Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { - final Filter finalBaseFilter = baseFilter == null - ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) - : baseFilter; + public static Filter combineFilters( + @Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { + final Filter finalBaseFilter = + baseFilter == null + ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) + : baseFilter; // Join the filter conditions in Disjunctive Normal Form. return combineFiltersInConjunction(finalBaseFilter, viewFilter); } /** - * Returns the intersection of two sets of entity types. (Really just string lists). - * If either is empty, consider the entity types list to mean "all" (take the other set). + * Returns the intersection of two sets of entity types. (Really just string lists). If either is + * empty, consider the entity types list to mean "all" (take the other set). * * @param baseEntityTypes the entity types to apply the view to * @param viewEntityTypes the view info, null if it doesn't exist - * * @return the intersection of the two input sets */ @Nonnull - public static List intersectEntityTypes(@Nonnull final List baseEntityTypes, @Nonnull final List viewEntityTypes) { + public static List intersectEntityTypes( + @Nonnull final List baseEntityTypes, @Nonnull final List viewEntityTypes) { if (baseEntityTypes.isEmpty()) { return viewEntityTypes; } @@ -171,126 +184,29 @@ public static List intersectEntityTypes(@Nonnull final List base * * @param filter1 the first filter in the pair * @param filter2 the second filter in the pair - * - * This method supports either Filter format, where the "or" field is used, instead - * of criteria. 
If the criteria filter is used, then it will be converted into an "OR" before - * returning the new filter. - * + *

This method supports either Filter format, where the "or" field is used, instead of + * criteria. If the criteria filter is used, then it will be converted into an "OR" before + * returning the new filter. * @return the result of joining the 2 filters in a conjunction (AND) - * - * How does it work? It basically cross-products the conjunctions inside of each Filter clause. - * - * Example Inputs: - * filter1 -> - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * } - * ] - * } - * ] - * } - * filter2 -> - * { - * or: [ - * { - * and: [ - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * }, - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term2"] - * } - * ] - * } - * ] - * } - * Example Output: - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * ] - * } + *

How does it work? It basically cross-products the conjunctions inside of each Filter + * clause. + *

Example Inputs: filter1 -> { or: [ { and: [ { field: tags, condition: EQUAL, values: + * ["urn:li:tag:tag"] } ] }, { and: [ { field: glossaryTerms, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term"] } ] } ] } filter2 -> { or: [ { and: [ { field: domain, + * condition: EQUAL, values: ["urn:li:domain:domain"] }, ] }, { and: [ { field: glossaryTerms, + * condition: EQUAL, values: ["urn:li:glossaryTerm:term2"] } ] } ] } Example Output: { or: [ { + * and: [ { field: tags, condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: domain, + * condition: EQUAL, values: ["urn:li:domain:domain"] } ] }, { and: [ { field: tags, + * condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: glossaryTerms, condition: EQUAL, + * values: ["urn:li:glosaryTerm:term2"] } ] }, { and: [ { field: glossaryTerm, condition: + * EQUAL, values: ["urn:li:glossaryTerm:term"] }, { field: domain, condition: EQUAL, values: + * ["urn:li:domain:domain"] } ] }, { and: [ { field: glossaryTerm, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term"] }, { field: glossaryTerms, condition: EQUAL, values: + * ["urn:li:glosaryTerm:term2"] } ] }, ] } */ @Nonnull - private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, @Nonnull final Filter filter2) { + private static Filter combineFiltersInConjunction( + @Nonnull final Filter filter1, @Nonnull final Filter filter2) { final Filter finalFilter1 = convertToV2Filter(filter1); final Filter finalFilter2 = convertToV2Filter(filter2); @@ -310,7 +226,8 @@ private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, for (ConjunctiveCriterion conjunction2 : finalFilter2.getOr()) { final List joinedCriterion = new ArrayList<>(conjunction1.getAnd()); joinedCriterion.addAll(conjunction2.getAnd()); - ConjunctiveCriterion newConjunction = new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion)); + ConjunctiveCriterion newConjunction = + new ConjunctiveCriterion().setAnd(new 
CriterionArray(joinedCriterion)); newDisjunction.add(newConjunction); } } @@ -325,38 +242,45 @@ private static Filter convertToV2Filter(@Nonnull Filter filter) { } else if (filter.hasCriteria()) { // Convert criteria to an OR return new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(filter.getCriteria()) - ))); + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(filter.getCriteria())))); } throw new IllegalArgumentException( - String.format("Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", filter)); + String.format( + "Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", + filter)); } /** - * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the specified - * urn cannot be found. + * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the + * specified urn cannot be found. */ - public static DataHubViewInfo resolveView(@Nonnull ViewService viewService, @Nonnull final Urn viewUrn, - @Nonnull final Authentication authentication) { + public static DataHubViewInfo resolveView( + @Nonnull OperationContext opContext, + @Nonnull ViewService viewService, + @Nonnull final Urn viewUrn) { try { - DataHubViewInfo maybeViewInfo = viewService.getViewInfo(viewUrn, authentication); + DataHubViewInfo maybeViewInfo = viewService.getViewInfo(opContext, viewUrn); if (maybeViewInfo == null) { - log.warn(String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn)); + log.warn( + String.format("Failed to resolve View with urn %s. 
View does not exist!", viewUrn)); } return maybeViewInfo; } catch (Exception e) { - throw new RuntimeException(String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), e); + throw new RuntimeException( + String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), + e); } } // Assumption is that filter values for degree are either null, 3+, 2, or 1. public static Integer getMaxHops(List filters) { - Set degreeFilterValues = filters.stream() - .filter(filter -> filter.getField().equals("degree")) - .flatMap(filter -> filter.getValues().stream()) - .collect(Collectors.toSet()); + Set degreeFilterValues = + filters.stream() + .filter(filter -> filter.getField().equals("degree")) + .flatMap(filter -> filter.getValues().stream()) + .collect(Collectors.toSet()); Integer maxHops = null; if (!degreeFilterValues.contains("3+")) { if (degreeFilterValues.contains("2")) { @@ -368,15 +292,18 @@ public static Integer getMaxHops(List filters) { return maxHops; } - public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { + public static SearchFlags mapInputFlags( + @Nullable QueryContext context, + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { SearchFlags searchFlags = null; if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } return searchFlags; } - public static SortCriterion mapSortCriterion(com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { + public static SortCriterion mapSortCriterion( + com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { SortCriterion result = new SortCriterion(); result.setField(sortCriterion.getField()); result.setOrder(SortOrder.valueOf(sortCriterion.getSortOrder().name())); @@ -388,4 +315,15 @@ public static List getEntityNames(List inputTypes) { (inputTypes == null || 
inputTypes.isEmpty()) ? SEARCHABLE_ENTITY_TYPES : inputTypes; return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); } -} \ No newline at end of file + + public static SearchResults createEmptySearchResults(final int start, final int count) { + final SearchResults result = new SearchResults(); + result.setStart(start); + result.setCount(count); + result.setTotal(0); + result.setSearchResults(new ArrayList<>()); + result.setSuggestions(new ArrayList<>()); + result.setFacets(new ArrayList<>()); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java new file mode 100644 index 00000000000000..84d3bcd7b376c0 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/DocPropagationSettingsResolver.java @@ -0,0 +1,57 @@ +package com.linkedin.datahub.graphql.resolvers.settings.docPropagation; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.generated.DocPropagationSettings; +import com.linkedin.metadata.service.SettingsService; +import com.linkedin.settings.global.GlobalSettingsInfo; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +/** Retrieves the Global Settings related to the Actions feature. 
*/ +@Slf4j +public class DocPropagationSettingsResolver + implements DataFetcher> { + + private final SettingsService _settingsService; + + public DocPropagationSettingsResolver(final SettingsService settingsService) { + _settingsService = Objects.requireNonNull(settingsService, "settingsService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final GlobalSettingsInfo globalSettings = + _settingsService.getGlobalSettings(context.getOperationContext()); + final DocPropagationSettings defaultSettings = new DocPropagationSettings(); + defaultSettings.setDocColumnPropagation(true); + return globalSettings != null && globalSettings.hasDocPropagation() + ? mapDocPropagationSettings(globalSettings.getDocPropagation()) + : defaultSettings; + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve Action Settings", e); + } + }, + this.getClass().getSimpleName(), + "get"); + } + + private static DocPropagationSettings mapDocPropagationSettings( + @Nonnull final com.linkedin.settings.global.DocPropagationFeatureSettings settings) { + final DocPropagationSettings result = new DocPropagationSettings(); + + // Map docColumnPropagation settings field + result.setDocColumnPropagation(settings.isColumnPropagationEnabled()); + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/UpdateDocPropagationSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/UpdateDocPropagationSettingsResolver.java new file mode 100644 index 00000000000000..198c36faad0bd8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/docPropagation/UpdateDocPropagationSettingsResolver.java @@ -0,0 
+1,77 @@ +package com.linkedin.datahub.graphql.resolvers.settings.docPropagation; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateDocPropagationSettingsInput; +import com.linkedin.metadata.service.SettingsService; +import com.linkedin.settings.global.DocPropagationFeatureSettings; +import com.linkedin.settings.global.GlobalSettingsInfo; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +/** Resolver responsible for updating the actions settings. */ +public class UpdateDocPropagationSettingsResolver + implements DataFetcher> { + + private final SettingsService _settingsService; + + public UpdateDocPropagationSettingsResolver(@Nonnull final SettingsService settingsService) { + _settingsService = Objects.requireNonNull(settingsService, "settingsService must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + final UpdateDocPropagationSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateDocPropagationSettingsInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (AuthorizationUtils.canManageFeatures(context)) { + try { + // First, fetch the existing global settings. This does a R-M-F. + final GlobalSettingsInfo maybeGlobalSettings = + _settingsService.getGlobalSettings(context.getOperationContext()); + + final GlobalSettingsInfo newGlobalSettings = + maybeGlobalSettings != null ? 
maybeGlobalSettings : new GlobalSettingsInfo(); + + final DocPropagationFeatureSettings newDocPropagationSettings = + newGlobalSettings.hasDocPropagation() + ? newGlobalSettings.getDocPropagation() + : new DocPropagationFeatureSettings().setEnabled(true); + + // Next, patch the actions settings. + updateDocPropagationSettings(newDocPropagationSettings, input); + newGlobalSettings.setDocPropagation(newDocPropagationSettings); + + // Finally, write back to GMS. + _settingsService.updateGlobalSettings( + context.getOperationContext(), newGlobalSettings); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update action settings! %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); + } + + private static void updateDocPropagationSettings( + @Nonnull final com.linkedin.settings.global.DocPropagationFeatureSettings settings, + @Nonnull final UpdateDocPropagationSettingsInput input) { + settings.setColumnPropagationEnabled(input.getDocColumnPropagation()); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java index 8c21277b66a69f..3d9488a954af1d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.UpdateCorpUserViewsSettingsInput; import com.linkedin.identity.CorpUserAppearanceSettings; import com.linkedin.identity.CorpUserSettings; @@ -16,58 +19,63 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating the authenticated user's View-specific settings. - */ +/** Resolver responsible for updating the authenticated user's View-specific settings. */ @Slf4j @RequiredArgsConstructor -public class UpdateCorpUserViewsSettingsResolver implements DataFetcher> { +public class UpdateCorpUserViewsSettingsResolver + implements DataFetcher> { private final SettingsService _settingsService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateCorpUserViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); + final UpdateCorpUserViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { - final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); + final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); - final CorpUserSettings maybeSettings = _settingsService.getCorpUserSettings( - userUrn, - context.getAuthentication() - ); + final CorpUserSettings maybeSettings = + _settingsService.getCorpUserSettings(context.getOperationContext(), userUrn); - final CorpUserSettings newSettings = maybeSettings == null - ? 
new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) - : maybeSettings; + final CorpUserSettings newSettings = + maybeSettings == null + ? new CorpUserSettings() + .setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) + : maybeSettings; - // Patch the new corp user settings. This does a R-M-F. - updateCorpUserSettings(newSettings, input); + // Patch the new corp user settings. This does a R-M-F. + updateCorpUserSettings(newSettings, input); - _settingsService.updateCorpUserSettings( - userUrn, - newSettings, - context.getAuthentication() - ); - return true; - } catch (Exception e) { - log.error("Failed to perform user view settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update to user view settings against input %s", input.toString()), e); - } - }); + _settingsService.updateCorpUserSettings( + context.getOperationContext(), userUrn, newSettings); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user view settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform update to user view settings against input %s", + input.toString()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } private static void updateCorpUserSettings( @Nonnull final CorpUserSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - final CorpUserViewsSettings newViewSettings = settings.hasViews() - ? settings.getViews() - : new CorpUserViewsSettings(); + final CorpUserViewsSettings newViewSettings = + settings.hasViews() ? 
settings.getViews() : new CorpUserViewsSettings(); updateCorpUserViewsSettings(newViewSettings, input); settings.setViews(newViewSettings); } @@ -75,9 +83,8 @@ private static void updateCorpUserSettings( private static void updateCorpUserViewsSettings( @Nonnull final CorpUserViewsSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java index f1aba3d9247c58..079799a269b01e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.GlobalViewsSettings; import com.linkedin.metadata.service.SettingsService; import com.linkedin.settings.global.GlobalSettingsInfo; @@ -14,11 +15,11 @@ /** * Retrieves the Global Settings related to the Views feature. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + *

This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ @Slf4j -public class GlobalViewsSettingsResolver implements - DataFetcher> { +public class GlobalViewsSettingsResolver + implements DataFetcher> { private final SettingsService _settingsService; @@ -27,25 +28,31 @@ public GlobalViewsSettingsResolver(final SettingsService settingsService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - final GlobalSettingsInfo globalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); - return globalSettings != null && globalSettings.hasViews() - ? mapGlobalViewsSettings(globalSettings.getViews()) - : new GlobalViewsSettings(); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve Global Views Settings", e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final GlobalSettingsInfo globalSettings = + _settingsService.getGlobalSettings(context.getOperationContext()); + return globalSettings != null && globalSettings.hasViews() + ? 
mapGlobalViewsSettings(globalSettings.getViews()) + : new GlobalViewsSettings(); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve Global Views Settings", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private static GlobalViewsSettings mapGlobalViewsSettings(@Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { + private static GlobalViewsSettings mapGlobalViewsSettings( + @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { final GlobalViewsSettings result = new GlobalViewsSettings(); if (settings.hasDefaultView()) { result.setDefaultView(settings.getDefaultView().toString()); } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java index c90ec04b3a2dfc..9ef6f9474fa510 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateGlobalViewsSettingsInput; import com.linkedin.metadata.service.SettingsService; @@ -15,13 +18,10 @@ import 
java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Resolver responsible for updating the Global Views settings. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + *

This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ public class UpdateGlobalViewsSettingsResolver implements DataFetcher> { @@ -32,45 +32,53 @@ public UpdateGlobalViewsSettingsResolver(@Nonnull final SettingsService settings } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateGlobalViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); + final UpdateGlobalViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (AuthorizationUtils.canManageGlobalViews(context)) { - try { - // First, fetch the existing global settings. This does a R-M-F. - final GlobalSettingsInfo maybeGlobalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (AuthorizationUtils.canManageGlobalViews(context)) { + try { + // First, fetch the existing global settings. This does a R-M-F. + final GlobalSettingsInfo maybeGlobalSettings = + _settingsService.getGlobalSettings(context.getOperationContext()); - final GlobalSettingsInfo newGlobalSettings = maybeGlobalSettings != null - ? maybeGlobalSettings - : new GlobalSettingsInfo(); + final GlobalSettingsInfo newGlobalSettings = + maybeGlobalSettings != null ? maybeGlobalSettings : new GlobalSettingsInfo(); - final GlobalViewsSettings newGlobalViewsSettings = newGlobalSettings.hasViews() - ? newGlobalSettings.getViews() - : new GlobalViewsSettings(); + final GlobalViewsSettings newGlobalViewsSettings = + newGlobalSettings.hasViews() + ? newGlobalSettings.getViews() + : new GlobalViewsSettings(); - // Next, patch the global views settings. 
- updateViewsSettings(newGlobalViewsSettings, input); - newGlobalSettings.setViews(newGlobalViewsSettings); + // Next, patch the global views settings. + updateViewsSettings(newGlobalViewsSettings, input); + newGlobalSettings.setViews(newGlobalViewsSettings); - // Finally, write back to GMS. - _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update global view settings! %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Finally, write back to GMS. + _settingsService.updateGlobalSettings( + context.getOperationContext(), newGlobalSettings); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update global view settings! %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private static void updateViewsSettings( @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings, @Nonnull final UpdateGlobalViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java index 0e93cdfb231fa4..afcc28e1fb2784 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java @@ -1,9 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.EntityKeyUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BatchGetStepStatesInput; import com.linkedin.datahub.graphql.generated.BatchGetStepStatesResult; import com.linkedin.datahub.graphql.generated.StepStateResult; @@ -16,6 +21,7 @@ import com.linkedin.step.DataHubStepStateProperties; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -27,64 +33,71 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.utils.EntityKeyUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchGetStepStatesResolver implements 
DataFetcher> { +public class BatchGetStepStatesResolver + implements DataFetcher> { private final EntityClient _entityClient; @Override - public CompletableFuture get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); final BatchGetStepStatesInput input = bindArgument(environment.getArgument("input"), BatchGetStepStatesInput.class); - return CompletableFuture.supplyAsync(() -> { - Map urnsToIdsMap; - Set urns; - Map entityResponseMap; + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + Map urnsToIdsMap; + Set urns; + Map entityResponseMap; - try { - urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); - urns = urnsToIdsMap.keySet(); - entityResponseMap = _entityClient.batchGetV2(DATAHUB_STEP_STATE_ENTITY_NAME, urns, - ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), authentication); - } catch (Exception e) { - throw new RuntimeException(e); - } + try { + urnsToIdsMap = buildUrnToIdMap(context.getOperationContext(), input.getIds()); + urns = urnsToIdsMap.keySet(); + entityResponseMap = + _entityClient.batchGetV2( + context.getOperationContext(), + DATAHUB_STEP_STATE_ENTITY_NAME, + urns, + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME)); + } catch (Exception e) { + throw new RuntimeException(e); + } - final Map stepStatePropertiesMap = new HashMap<>(); - for (Map.Entry entry : entityResponseMap.entrySet()) { - final Urn urn = entry.getKey(); - final DataHubStepStateProperties stepStateProperties = getStepStateProperties(urn, entry.getValue()); - if (stepStateProperties != null) { - stepStatePropertiesMap.put(urn, stepStateProperties); - } - } + final Map stepStatePropertiesMap = new HashMap<>(); + for (Map.Entry entry : entityResponseMap.entrySet()) { + final Urn urn = 
entry.getKey(); + final DataHubStepStateProperties stepStateProperties = + getStepStateProperties(urn, entry.getValue()); + if (stepStateProperties != null) { + stepStatePropertiesMap.put(urn, stepStateProperties); + } + } - final List results = stepStatePropertiesMap.entrySet() - .stream() - .map(entry -> buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) - .collect(Collectors.toList()); - final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); - result.setResults(results); - return result; - }); + final List results = + stepStatePropertiesMap.entrySet().stream() + .map( + entry -> + buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); + result.setResults(results); + return result; + }, + this.getClass().getSimpleName(), + "get"); } @Nonnull - private Map buildUrnToIdMap(@Nonnull final List ids, @Nonnull final Authentication authentication) + private Map buildUrnToIdMap( + @Nonnull OperationContext opContext, @Nonnull final List ids) throws RemoteInvocationException { final Map urnToIdMap = new HashMap<>(); for (final String id : ids) { final Urn urn = getStepStateUrn(id); - if (_entityClient.exists(urn, authentication)) { + if (_entityClient.exists(opContext, urn)) { urnToIdMap.put(urn, id); } } @@ -99,37 +112,37 @@ private Urn getStepStateUrn(@Nonnull final String id) { } @Nullable - private DataHubStepStateProperties getStepStateProperties(@Nonnull final Urn urn, - @Nonnull final EntityResponse entityResponse) { + private DataHubStepStateProperties getStepStateProperties( + @Nonnull final Urn urn, @Nonnull final EntityResponse entityResponse) { final EnvelopedAspectMap aspectMap = entityResponse.getAspects(); // If aspect is not present, log the error and return null. 
if (!aspectMap.containsKey(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME)) { log.error("Failed to find step state properties for urn: " + urn); return null; } - return new DataHubStepStateProperties(aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); + return new DataHubStepStateProperties( + aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); } @Nonnull - private StepStateResult buildStepStateResult(@Nonnull final String id, - @Nonnull final DataHubStepStateProperties stepStateProperties) { + private StepStateResult buildStepStateResult( + @Nonnull final String id, @Nonnull final DataHubStepStateProperties stepStateProperties) { final StepStateResult result = new StepStateResult(); result.setId(id); - final List mappedProperties = stepStateProperties - .getProperties() - .entrySet() - .stream() - .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); + final List mappedProperties = + stepStateProperties.getProperties().entrySet().stream() + .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()); result.setProperties(mappedProperties); return result; } @Nonnull - private StringMapEntry buildStringMapEntry(@Nonnull final String key, @Nonnull final String value) { + private StringMapEntry buildStringMapEntry( + @Nonnull final String key, @Nonnull final String value) { final StringMapEntry entry = new StringMapEntry(); entry.setKey(key); entry.setValue(value); return entry; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java index e4c21207ddd344..5b8481291f1054 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java @@ -1,11 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.BatchUpdateStepStatesInput; import com.linkedin.datahub.graphql.generated.BatchUpdateStepStatesResult; import com.linkedin.datahub.graphql.generated.StepStateInput; @@ -17,6 +22,7 @@ import com.linkedin.step.DataHubStepStateProperties; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; @@ -25,19 +31,15 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchUpdateStepStatesResolver implements DataFetcher> { +public class BatchUpdateStepStatesResolver + implements DataFetcher> { private final EntityClient _entityClient; @Override - public CompletableFuture get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final 
Authentication authentication = context.getAuthentication(); @@ -46,48 +48,64 @@ public CompletableFuture get(@Nonnull final DataFet final List states = input.getStates(); final String actorUrnStr = authentication.getActor().toUrnStr(); - return CompletableFuture.supplyAsync(() -> { - final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); - final AuditStamp auditStamp = new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); - final List results = states - .stream() - .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) - .collect(Collectors.toList()); - final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); - result.setResults(results); - return result; - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); + final AuditStamp auditStamp = + new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); + final List results = + states.stream() + .map( + state -> + buildUpdateStepStateResult( + context.getOperationContext(), state, auditStamp)) + .collect(Collectors.toList()); + final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); + result.setResults(results); + return result; + }, + this.getClass().getSimpleName(), + "get"); } - private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStateInput state, - @Nonnull final AuditStamp auditStamp, - @Nonnull final Authentication authentication) { + private UpdateStepStateResult buildUpdateStepStateResult( + @Nonnull OperationContext opContext, + @Nonnull final StepStateInput state, + @Nonnull final AuditStamp auditStamp) { final String id = state.getId(); final UpdateStepStateResult updateStepStateResult = new UpdateStepStateResult(); updateStepStateResult.setId(id); - final boolean success = updateStepState(id, state.getProperties(), auditStamp, authentication); + final boolean success = updateStepState(opContext, id, state.getProperties(), 
auditStamp); updateStepStateResult.setSucceeded(success); return updateStepStateResult; } - private boolean updateStepState(@Nonnull final String id, - @Nonnull final List inputProperties, @Nonnull final AuditStamp auditStamp, - @Nonnull final Authentication authentication) { + private boolean updateStepState( + @Nonnull OperationContext opContext, + @Nonnull final String id, + @Nonnull final List inputProperties, + @Nonnull final AuditStamp auditStamp) { final Map properties = - inputProperties.stream().collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + inputProperties.stream() + .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); try { final DataHubStepStateKey stepStateKey = new DataHubStepStateKey().setId(id); final DataHubStepStateProperties stepStateProperties = - new DataHubStepStateProperties().setProperties(new StringMap(properties)).setLastModified(auditStamp); + new DataHubStepStateProperties() + .setProperties(new StringMap(properties)) + .setLastModified(auditStamp); final MetadataChangeProposal proposal = - buildMetadataChangeProposal(DATAHUB_STEP_STATE_ENTITY_NAME, stepStateKey, - DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, stepStateProperties); - _entityClient.ingestProposal(proposal, authentication, false); + buildMetadataChangeProposal( + DATAHUB_STEP_STATE_ENTITY_NAME, + stepStateKey, + DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, + stepStateProperties); + _entityClient.ingestProposal(opContext, proposal, false); return true; } catch (Exception e) { log.error("Could not update step state for id {}", id, e); return false; } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java new file mode 100644 index 
00000000000000..3be7ea505abbf3 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolver.java @@ -0,0 +1,136 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.CreateStructuredPropertyInput; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.aspect.patch.builder.StructuredPropertyDefinitionPatchBuilder; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PropertyCardinality; +import com.linkedin.structured.PropertyValue; +import com.linkedin.structured.StructuredPropertyKey; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Objects; +import java.util.UUID; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; + +public class CreateStructuredPropertyResolver + implements DataFetcher> { + + private final EntityClient _entityClient; + + public CreateStructuredPropertyResolver(@Nonnull final EntityClient entityClient) { + _entityClient = 
Objects.requireNonNull(entityClient, "entityClient must not be null"); + } + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final QueryContext context = environment.getContext(); + + final CreateStructuredPropertyInput input = + bindArgument(environment.getArgument("input"), CreateStructuredPropertyInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + if (!AuthorizationUtils.canManageStructuredProperties(context)) { + throw new AuthorizationException( + "Unable to create structured property. Please contact your admin."); + } + final StructuredPropertyKey key = new StructuredPropertyKey(); + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setId(id); + final Urn propertyUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, STRUCTURED_PROPERTY_ENTITY_NAME); + StructuredPropertyDefinitionPatchBuilder builder = + new StructuredPropertyDefinitionPatchBuilder().urn(propertyUrn); + + builder.setQualifiedName(input.getQualifiedName()); + builder.setValueType(input.getValueType()); + input.getEntityTypes().forEach(builder::addEntityType); + if (input.getDisplayName() != null) { + builder.setDisplayName(input.getDisplayName()); + } + if (input.getDescription() != null) { + builder.setDescription(input.getDescription()); + } + if (input.getImmutable() != null) { + builder.setImmutable(input.getImmutable()); + } + if (input.getTypeQualifier() != null) { + buildTypeQualifier(input, builder); + } + if (input.getAllowedValues() != null) { + buildAllowedValues(input, builder); + } + if (input.getCardinality() != null) { + builder.setCardinality( + PropertyCardinality.valueOf(input.getCardinality().toString())); + } + + MetadataChangeProposal mcp = builder.build(); + _entityClient.ingestProposal(context.getOperationContext(), mcp, false); + + EntityResponse response = + _entityClient.getV2( + context.getOperationContext(), + STRUCTURED_PROPERTY_ENTITY_NAME, + 
propertyUrn, + null); + return StructuredPropertyMapper.map(context, response); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); + } + + private void buildTypeQualifier( + @Nonnull final CreateStructuredPropertyInput input, + @Nonnull final StructuredPropertyDefinitionPatchBuilder builder) { + if (input.getTypeQualifier().getAllowedTypes() != null) { + final StringArrayMap typeQualifier = new StringArrayMap(); + StringArray allowedTypes = new StringArray(); + allowedTypes.addAll(input.getTypeQualifier().getAllowedTypes()); + typeQualifier.put("allowedTypes", allowedTypes); + builder.setTypeQualifier(typeQualifier); + } + } + + private void buildAllowedValues( + @Nonnull final CreateStructuredPropertyInput input, + @Nonnull final StructuredPropertyDefinitionPatchBuilder builder) { + input + .getAllowedValues() + .forEach( + allowedValueInput -> { + PropertyValue value = new PropertyValue(); + PrimitivePropertyValue primitiveValue = new PrimitivePropertyValue(); + if (allowedValueInput.getStringValue() != null) { + primitiveValue.setString(allowedValueInput.getStringValue()); + } + if (allowedValueInput.getNumberValue() != null) { + primitiveValue.setDouble(allowedValueInput.getNumberValue().doubleValue()); + } + value.setValue(primitiveValue); + value.setDescription(allowedValueInput.getDescription(), SetMode.IGNORE_NULL); + builder.addAllowedValue(value); + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java new file mode 100644 index 00000000000000..ea8c6dac36a4af --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolver.java @@ -0,0 +1,103 @@ 
package com.linkedin.datahub.graphql.resolvers.structuredproperties;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;

import com.google.common.collect.ImmutableSet;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.RemoveStructuredPropertiesInput;
import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.aspect.patch.builder.StructuredPropertiesPatchBuilder;
import com.linkedin.mxe.MetadataChangeProposal;
import com.linkedin.structured.StructuredProperties;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nonnull;

/**
 * GraphQL resolver that removes one or more structured properties from an asset.
 *
 * <p>Flow: authorize the caller for property edits on the asset, verify the asset exists, build a
 * patch-style {@link MetadataChangeProposal} removing each requested property assignment, ingest
 * it synchronously, then re-read the structuredProperties aspect so the caller receives the
 * post-removal state.
 */
public class RemoveStructuredPropertiesResolver
    implements DataFetcher<
        CompletableFuture<com.linkedin.datahub.graphql.generated.StructuredProperties>> {

  private final EntityClient _entityClient;

  public RemoveStructuredPropertiesResolver(@Nonnull final EntityClient entityClient) {
    _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null");
  }

  /**
   * @param environment carries the {@code input} argument (asset urn + property urns to remove)
   *     and the {@link QueryContext}.
   * @return the asset's structured properties after the removal has been ingested.
   * @throws AuthorizationException (wrapped) if the caller may not edit properties on the asset.
   */
  @Override
  public CompletableFuture<com.linkedin.datahub.graphql.generated.StructuredProperties> get(
      final DataFetchingEnvironment environment) throws Exception {
    final QueryContext context = environment.getContext();

    final RemoveStructuredPropertiesInput input =
        bindArgument(environment.getArgument("input"), RemoveStructuredPropertiesInput.class);
    final Urn assetUrn = UrnUtils.getUrn(input.getAssetUrn());

    return CompletableFuture.supplyAsync(
        () -> {
          try {
            // Check authorization first so unauthorized callers learn nothing about the asset.
            if (!AuthorizationUtils.canEditProperties(assetUrn, context)) {
              throw new AuthorizationException(
                  String.format(
                      "Not authorized to update properties on the given urn %s", assetUrn));
            }

            if (!_entityClient.exists(context.getOperationContext(), assetUrn)) {
              throw new RuntimeException(
                  String.format("Asset with provided urn %s does not exist", assetUrn));
            }

            StructuredPropertiesPatchBuilder patchBuilder =
                new StructuredPropertiesPatchBuilder().urn(assetUrn);

            input
                .getStructuredPropertyUrns()
                .forEach(
                    propertyUrn -> {
                      patchBuilder.removeProperty(UrnUtils.getUrn(propertyUrn));
                    });

            // Ingest the patch change proposal synchronously (async = false) so the re-read
            // below observes the removal.
            final MetadataChangeProposal structuredPropertiesProposal = patchBuilder.build();

            _entityClient.ingestProposal(
                context.getOperationContext(), structuredPropertiesProposal, false);

            // Re-fetch the aspect to return the authoritative post-update state.
            EntityResponse response =
                _entityClient.getV2(
                    context.getOperationContext(),
                    assetUrn.getEntityType(),
                    assetUrn,
                    ImmutableSet.of(Constants.STRUCTURED_PROPERTIES_ASPECT_NAME));

            if (response == null
                || response.getAspects().get(Constants.STRUCTURED_PROPERTIES_ASPECT_NAME)
                    == null) {
              throw new RuntimeException(
                  String.format("Failed to fetch structured properties from entity %s", assetUrn));
            }

            StructuredProperties structuredProperties =
                new StructuredProperties(
                    response
                        .getAspects()
                        .get(Constants.STRUCTURED_PROPERTIES_ASPECT_NAME)
                        .getValue()
                        .data());

            return StructuredPropertiesMapper.map(context, structuredProperties);
          } catch (Exception e) {
            throw new RuntimeException(
                String.format("Failed to perform update against input %s", input), e);
          }
        });
  }
}
package com.linkedin.datahub.graphql.resolvers.structuredproperties;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME;

import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.data.template.SetMode;
import com.linkedin.data.template.StringArray;
import com.linkedin.data.template.StringArrayMap;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity;
import com.linkedin.datahub.graphql.generated.UpdateStructuredPropertyInput;
import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertyMapper;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.aspect.patch.builder.StructuredPropertyDefinitionPatchBuilder;
import com.linkedin.mxe.MetadataChangeProposal;
import com.linkedin.structured.PrimitivePropertyValue;
import com.linkedin.structured.PropertyCardinality;
import com.linkedin.structured.PropertyValue;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import javax.annotation.Nonnull;

/**
 * GraphQL resolver that updates the definition of an existing structured property.
 *
 * <p>Requires the manage-structured-properties privilege. Every input field is optional; only the
 * fields present on the input are written, via a patch {@link MetadataChangeProposal}. After the
 * proposal is ingested synchronously, the property entity is re-fetched and mapped back to the
 * GraphQL type.
 */
public class UpdateStructuredPropertyResolver
    implements DataFetcher<CompletableFuture<StructuredPropertyEntity>> {

  private final EntityClient _entityClient;

  public UpdateStructuredPropertyResolver(@Nonnull final EntityClient entityClient) {
    _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null");
  }

  @Override
  public CompletableFuture<StructuredPropertyEntity> get(
      final DataFetchingEnvironment environment) throws Exception {
    final QueryContext context = environment.getContext();

    final UpdateStructuredPropertyInput input =
        bindArgument(environment.getArgument("input"), UpdateStructuredPropertyInput.class);

    return CompletableFuture.supplyAsync(
        () -> {
          try {
            if (!AuthorizationUtils.canManageStructuredProperties(context)) {
              throw new AuthorizationException(
                  "Unable to update structured property. Please contact your admin.");
            }
            final Urn propertyUrn = UrnUtils.getUrn(input.getUrn());
            final StructuredPropertyDefinitionPatchBuilder patchBuilder =
                new StructuredPropertyDefinitionPatchBuilder().urn(propertyUrn);

            // Apply only the fields the caller actually supplied.
            applyOptionalUpdates(input, patchBuilder);

            final MetadataChangeProposal proposal = patchBuilder.build();
            _entityClient.ingestProposal(context.getOperationContext(), proposal, false);

            // Re-read the entity so the response reflects the persisted definition.
            final EntityResponse updated =
                _entityClient.getV2(
                    context.getOperationContext(),
                    STRUCTURED_PROPERTY_ENTITY_NAME,
                    propertyUrn,
                    null);
            return StructuredPropertyMapper.map(context, updated);
          } catch (Exception e) {
            throw new RuntimeException(
                String.format("Failed to perform update against input %s", input), e);
          }
        });
  }

  /** Copies each non-null optional field of the input onto the patch builder, in input order. */
  private void applyOptionalUpdates(
      @Nonnull final UpdateStructuredPropertyInput input,
      @Nonnull final StructuredPropertyDefinitionPatchBuilder patchBuilder) {
    if (input.getDisplayName() != null) {
      patchBuilder.setDisplayName(input.getDisplayName());
    }
    if (input.getDescription() != null) {
      patchBuilder.setDescription(input.getDescription());
    }
    if (input.getImmutable() != null) {
      patchBuilder.setImmutable(input.getImmutable());
    }
    if (input.getTypeQualifier() != null) {
      buildTypeQualifier(input, patchBuilder);
    }
    if (input.getNewAllowedValues() != null) {
      buildAllowedValues(input, patchBuilder);
    }
    if (input.getSetCardinalityAsMultiple() != null) {
      // Cardinality can only be widened to MULTIPLE; the flag's presence triggers the change.
      patchBuilder.setCardinality(PropertyCardinality.MULTIPLE);
    }
    if (input.getNewEntityTypes() != null) {
      input.getNewEntityTypes().forEach(patchBuilder::addEntityType);
    }
  }

  /** Writes the new allowed asset types, if any, under the "allowedTypes" qualifier key. */
  private void buildTypeQualifier(
      @Nonnull final UpdateStructuredPropertyInput input,
      @Nonnull final StructuredPropertyDefinitionPatchBuilder patchBuilder) {
    if (input.getTypeQualifier().getNewAllowedTypes() == null) {
      return;
    }
    final StringArray allowedTypes = new StringArray();
    allowedTypes.addAll(input.getTypeQualifier().getNewAllowedTypes());
    final StringArrayMap typeQualifier = new StringArrayMap();
    typeQualifier.put("allowedTypes", allowedTypes);
    patchBuilder.setTypeQualifier(typeQualifier);
  }

  /** Appends each newly allowed value (string or numeric) to the property definition. */
  private void buildAllowedValues(
      @Nonnull final UpdateStructuredPropertyInput input,
      @Nonnull final StructuredPropertyDefinitionPatchBuilder patchBuilder) {
    input
        .getNewAllowedValues()
        .forEach(
            valueInput -> {
              final PropertyValue propertyValue = new PropertyValue();
              final PrimitivePropertyValue primitive = new PrimitivePropertyValue();
              if (valueInput.getStringValue() != null) {
                primitive.setString(valueInput.getStringValue());
              }
              if (valueInput.getNumberValue() != null) {
                primitive.setDouble(valueInput.getNumberValue().doubleValue());
              }
              propertyValue.setValue(primitive);
              // Description is optional; IGNORE_NULL keeps it absent rather than null.
              propertyValue.setDescription(valueInput.getDescription(), SetMode.IGNORE_NULL);
              patchBuilder.addAllowedValue(propertyValue);
            });
  }
}
package com.linkedin.datahub.graphql.resolvers.structuredproperties;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME;

import com.datahub.authentication.Authentication;
import com.linkedin.common.AuditStamp;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.authorization.AuthorizationUtils;
import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils;
import com.linkedin.datahub.graphql.exception.AuthorizationException;
import com.linkedin.datahub.graphql.generated.PropertyValueInput;
import com.linkedin.datahub.graphql.generated.UpsertStructuredPropertiesInput;
import com.linkedin.datahub.graphql.resolvers.mutate.util.StructuredPropertyUtils;
import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper;
import com.linkedin.entity.EntityResponse;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.entity.AspectUtils;
import com.linkedin.metadata.utils.AuditStampUtils;
import com.linkedin.mxe.MetadataChangeProposal;
import com.linkedin.structured.PrimitivePropertyValueArray;
import com.linkedin.structured.StructuredProperties;
import com.linkedin.structured.StructuredPropertyValueAssignment;
import com.linkedin.structured.StructuredPropertyValueAssignmentArray;
import graphql.com.google.common.collect.ImmutableSet;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import io.datahubproject.metadata.context.OperationContext;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;

/**
 * GraphQL resolver that upserts structured property values on an asset.
 *
 * <p>For each property urn in the input: if the asset already has an assignment for that urn, its
 * values are replaced and lastModified is stamped; otherwise a new assignment is appended with
 * both created and lastModified stamps. The merged aspect is ingested synchronously and the merged
 * state is returned.
 */
public class UpsertStructuredPropertiesResolver
    implements DataFetcher<
        CompletableFuture<com.linkedin.datahub.graphql.generated.StructuredProperties>> {

  private final EntityClient _entityClient;

  public UpsertStructuredPropertiesResolver(@Nonnull final EntityClient entityClient) {
    _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null");
  }

  /**
   * @param environment carries the {@code input} argument (asset urn + property/value pairs) and
   *     the {@link QueryContext}.
   * @return the asset's structured properties after the upsert.
   * @throws AuthorizationException (wrapped) if the caller may not edit properties on the asset.
   */
  @Override
  public CompletableFuture<com.linkedin.datahub.graphql.generated.StructuredProperties> get(
      final DataFetchingEnvironment environment) throws Exception {
    final QueryContext context = environment.getContext();
    final Authentication authentication = context.getAuthentication();

    final UpsertStructuredPropertiesInput input =
        bindArgument(environment.getArgument("input"), UpsertStructuredPropertiesInput.class);
    final Urn assetUrn = UrnUtils.getUrn(input.getAssetUrn());
    Map<String, List<PropertyValueInput>> updateMap = new HashMap<>();
    // create a map of updates from our input: property urn -> requested values
    input
        .getStructuredPropertyInputParams()
        .forEach(param -> updateMap.put(param.getStructuredPropertyUrn(), param.getValues()));

    return GraphQLConcurrencyUtils.supplyAsync(
        () -> {
          try {
            // check authorization first
            if (!AuthorizationUtils.canEditProperties(assetUrn, context)) {
              throw new AuthorizationException(
                  String.format(
                      "Not authorized to update properties on the given urn %s", assetUrn));
            }

            final AuditStamp auditStamp =
                AuditStampUtils.createAuditStamp(authentication.getActor().toUrnStr());

            if (!_entityClient.exists(context.getOperationContext(), assetUrn)) {
              throw new RuntimeException(
                  String.format("Asset with provided urn %s does not exist", assetUrn));
            }

            // get the structured properties aspect, or an empty default if absent
            StructuredProperties structuredProperties =
                getStructuredProperties(context.getOperationContext(), assetUrn);

            // update the existing properties based on new values
            StructuredPropertyValueAssignmentArray properties =
                updateExistingProperties(structuredProperties, updateMap, auditStamp);

            // append any new properties from our input
            addNewProperties(properties, updateMap, auditStamp);

            structuredProperties.setProperties(properties);

            // ingest change proposal (synchronous so the returned state is persisted)
            final MetadataChangeProposal structuredPropertiesProposal =
                AspectUtils.buildMetadataChangeProposal(
                    assetUrn, STRUCTURED_PROPERTIES_ASPECT_NAME, structuredProperties);

            _entityClient.ingestProposal(
                context.getOperationContext(), structuredPropertiesProposal, false);

            return StructuredPropertiesMapper.map(context, structuredProperties);
          } catch (Exception e) {
            throw new RuntimeException(
                String.format("Failed to perform update against input %s", input), e);
          }
        },
        this.getClass().getSimpleName(),
        "get");
  }

  /**
   * Fetches the asset's structuredProperties aspect, or returns an empty aspect if the asset has
   * none yet.
   */
  private StructuredProperties getStructuredProperties(
      @Nonnull OperationContext opContext, Urn assetUrn) throws Exception {
    EntityResponse response =
        _entityClient.getV2(
            opContext,
            assetUrn.getEntityType(),
            assetUrn,
            ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME));
    StructuredProperties structuredProperties = new StructuredProperties();
    structuredProperties.setProperties(new StructuredPropertyValueAssignmentArray());
    if (response != null && response.getAspects().containsKey(STRUCTURED_PROPERTIES_ASPECT_NAME)) {
      structuredProperties =
          new StructuredProperties(
              response.getAspects().get(STRUCTURED_PROPERTIES_ASPECT_NAME).getValue().data());
    }
    return structuredProperties;
  }

  /**
   * Replaces the values (and refreshes lastModified) of every existing assignment whose property
   * urn appears in updateMap; assignments not in the map are passed through unchanged.
   */
  private StructuredPropertyValueAssignmentArray updateExistingProperties(
      StructuredProperties structuredProperties,
      Map<String, List<PropertyValueInput>> updateMap,
      AuditStamp auditStamp) {
    return new StructuredPropertyValueAssignmentArray(
        structuredProperties.getProperties().stream()
            .map(
                propAssignment -> {
                  String propUrnString = propAssignment.getPropertyUrn().toString();
                  if (updateMap.containsKey(propUrnString)) {
                    List<PropertyValueInput> valueList = updateMap.get(propUrnString);
                    PrimitivePropertyValueArray values =
                        new PrimitivePropertyValueArray(
                            valueList.stream()
                                .map(StructuredPropertyUtils::mapPropertyValueInput)
                                .collect(Collectors.toList()));
                    propAssignment.setValues(values);
                    propAssignment.setLastModified(auditStamp);
                  }
                  return propAssignment;
                })
            .collect(Collectors.toList()));
  }

  /**
   * Appends a brand-new assignment for every entry of updateMap that did not match an existing
   * assignment. Mutates both {@code properties} and {@code updateMap}.
   */
  private void addNewProperties(
      StructuredPropertyValueAssignmentArray properties,
      Map<String, List<PropertyValueInput>> updateMap,
      AuditStamp auditStamp) {
    // first remove existing properties from updateMap so that we append only new properties
    properties.forEach(prop -> updateMap.remove(prop.getPropertyUrn().toString()));

    updateMap.forEach(
        (structuredPropUrn, values) -> {
          StructuredPropertyValueAssignment valueAssignment =
              new StructuredPropertyValueAssignment();
          valueAssignment.setPropertyUrn(UrnUtils.getUrn(structuredPropUrn));
          valueAssignment.setValues(
              new PrimitivePropertyValueArray(
                  values.stream()
                      .map(StructuredPropertyUtils::mapPropertyValueInput)
                      .collect(Collectors.toList())));
          valueAssignment.setCreated(auditStamp);
          valueAssignment.setLastModified(auditStamp);
          properties.add(valueAssignment);
        });
  }
}
com.linkedin.metadata.entity.EntityService; @@ -22,14 +25,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS privilege. + * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS + * privilege. */ @Slf4j @RequiredArgsConstructor @@ -42,43 +40,54 @@ public class CreateTagResolver implements DataFetcher> public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateTagInput input = bindArgument(environment.getArgument("input"), CreateTagInput.class); - - return CompletableFuture.supplyAsync(() -> { + final CreateTagInput input = + bindArgument(environment.getArgument("input"), CreateTagInput.class); - if (!AuthorizationUtils.canCreateTags(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateTags(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - try { - // Create the Tag Key - final TagKey key = new TagKey(); + try { + // Create the Tag Key + final TagKey key = new TagKey(); - // Take user provided id OR generate a random UUID for the Tag. - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); + // Take user provided id OR generate a random UUID for the Tag. + final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Tag already exists!"); - } + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME))) { + throw new IllegalArgumentException("This Tag already exists!"); + } - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); - String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); + String tagUrn = + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); - OwnerUtils.addCreatorAsOwner(context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return tagUrn; - } catch (Exception e) { - log.error("Failed to create Tag with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + OwnerUtils.addCreatorAsOwner( + context, tagUrn, OwnerEntityType.CORP_USER, _entityService); + return tagUrn; + } catch (Exception e) { + log.error( + "Failed to create Tag with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Tag 
with id: %s, name: %s", input.getId(), input.getName()), + e); + } + }, + this.getClass().getSimpleName(), + "get"); } private TagProperties mapTagProperties(final CreateTagInput input) { @@ -87,4 +96,4 @@ private TagProperties mapTagProperties(final CreateTagInput input) { result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java index e6c3cf49df8db4..2dabae55dd3e7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java @@ -4,6 +4,7 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; @@ -11,10 +12,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class DeleteTagResolver implements DataFetcher> { @@ -25,33 +23,43 @@ public DeleteTagResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String tagUrn = environment.getArgument("urn"); final 
Urn urn = Urn.createFromString(tagUrn); - return CompletableFuture.supplyAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (AuthorizationUtils.canManageTags(context) + || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) { + try { + _entityClient.deleteEntity(context.getOperationContext(), urn); - if (AuthorizationUtils.canManageTags(context) || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(context.getOperationContext(), urn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Tag with urn %s", + urn), + e); + } + }); - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + return true; } catch (Exception e) { - log.error(String.format( - "Caught exception while attempting to clear all entity references for Tag with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against domain with urn %s", tagUrn), e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", tagUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java index e2aa5905be8bd3..7a059ed9a1aeda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java @@ -1,13 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -21,19 +25,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. 
Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. */ @Slf4j @RequiredArgsConstructor public class SetTagColorResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -42,48 +44,61 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn tagUrn = Urn.createFromString(environment.getArgument("urn")); final String colorHex = environment.getArgument("colorHex"); - return CompletableFuture.supplyAsync(() -> { - - // If user is not authorized, then throw exception. - if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - // If tag does not exist, then throw exception. - if (!_entityService.exists(tagUrn)) { - throw new IllegalArgumentException( - String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); - } - - try { - TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( - tagUrn.toString(), - TAG_PROPERTIES_ASPECT_NAME, - _entityService, - null); - - if (tagProperties == null) { - throw new IllegalArgumentException("Failed to set tag color. Tag properties does not yet exist!"); - } - - tagProperties.setColorHex(colorHex); - - // Update the TagProperties aspect. 
- final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set color for Tag with urn %s", tagUrn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + + // If user is not authorized, then throw exception. + if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + // If tag does not exist, then throw exception. + if (!_entityService.exists(context.getOperationContext(), tagUrn, true)) { + throw new IllegalArgumentException( + String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); + } + + try { + TagProperties tagProperties = + (TagProperties) + EntityUtils.getAspectFromEntity( + context.getOperationContext(), + tagUrn.toString(), + TAG_PROPERTIES_ASPECT_NAME, + _entityService, + null); + + if (tagProperties == null) { + throw new IllegalArgumentException( + "Failed to set tag color. Tag properties does not yet exist!"); + } + + tagProperties.setColorHex(colorHex); + + // Update the TagProperties aspect. 
+ final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + return true; + } catch (Exception e) { + log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); + throw new RuntimeException( + String.format("Failed to set color for Tag with urn %s", tagUrn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -92,4 +107,4 @@ public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, U entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java index 14ae9f96eb6831..25fab6281f9483 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java @@ -1,8 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateTestInput; import com.linkedin.entity.client.EntityClient; @@ -15,15 +21,7 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates a Test. Requires the MANAGE_TESTS privilege. - */ +/** Creates or updates a Test. Requires the MANAGE_TESTS privilege. */ public class CreateTestResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -36,39 +34,47 @@ public CreateTestResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final CreateTestInput input = bindArgument(environment.getArgument("input"), CreateTestInput.class); - - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - - try { - - // Create new test - // Since we are creating a new Test, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = input.getId() == null ? 
uuid.toString() : input.getId(); - - // Create the Ingestion source key - final TestKey key = new TestKey(); - key.setId(uuidStr); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), - authentication)) { - throw new IllegalArgumentException("This Test already exists!"); + final CreateTestInput input = + bindArgument(environment.getArgument("input"), CreateTestInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (canManageTests(context)) { + + try { + + // Create new test + // Since we are creating a new Test, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = input.getId() == null ? uuid.toString() : input.getId(); + + // Create the Ingestion source key + final TestKey key = new TestKey(); + key.setId(uuidStr); + + if (_entityClient.exists( + context.getOperationContext(), + EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME))) { + throw new IllegalArgumentException("This Test already exists!"); + } + + // Create the Test info. + final TestInfo info = mapCreateTestInput(input); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } } - - // Create the Test info. - final TestInfo info = mapCreateTestInput(input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private static TestInfo mapCreateTestInput(final CreateTestInput input) { @@ -79,5 +85,4 @@ private static TestInfo mapCreateTestInput(final CreateTestInput input) { result.setDefinition(mapDefinition(input.getDefinition())); return result; } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java index e0c878dc652bd7..d9a7ed3f90ebee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - /** * Resolver responsible for hard deleting a particular DataHub Test. 
Requires MANAGE_TESTS @@ -23,20 +24,26 @@ public DeleteTestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String testUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(testUrn); - return CompletableFuture.supplyAsync(() -> { - if (canManageTests(context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against Test with urn %s", testUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (canManageTests(context)) { + try { + _entityClient.deleteEntity(context.getOperationContext(), urn); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against Test with urn %s", testUrn), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index a1e1e48aae8473..9be0bcb0e18397 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -1,15 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.datahub.graphql.generated.ListTestsResult; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -20,12 +23,9 @@ import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - - /** - * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. 
+ * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform + * privilege. */ public class ListTestsResolver implements DataFetcher> { @@ -39,45 +39,53 @@ public ListTestsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - final ListTestsInput input = bindArgument(environment.getArgument("input"), ListTestsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? "" : input.getQuery(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (canManageTests(context) || canViewTests(context)) { + final ListTestsInput input = + bindArgument(environment.getArgument("input"), ListTestsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? "" : input.getQuery(); - try { - // First, get all group Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.TEST_ENTITY_NAME, - query, - Collections.emptyMap(), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all group Urns. + final SearchResult gmsResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), + Constants.TEST_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count); - // Now that we have entities we can bind this to a result. 
- final ListTestsResult result = new ListTestsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setTests(mapUnresolvedTests(gmsResult.getEntities())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list tests", e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Now that we have entities we can bind this to a result. + final ListTestsResult result = new ListTestsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setTests(mapUnresolvedTests(gmsResult.getEntities())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list tests", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } - // This method maps urns returned from the list endpoint into Partial Test objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Test objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedTests(final SearchEntityArray entityArray) { final List results = new ArrayList<>(); for (final SearchEntity entity : entityArray) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java index 9c4b5a4d4e0fa3..df3566cf0efab9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.TestResult; @@ -20,10 +21,7 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver used for fetching the list of tests for an entity - */ +/** GraphQL Resolver used for fetching the list of tests for an entity */ @Slf4j public class TestResultsResolver implements DataFetcher> { @@ -38,42 +36,46 @@ public CompletableFuture get(DataFetchingEnvironment environment) t final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); - if (gmsTestResults == null) { - return null; - } + if (gmsTestResults == null) { + return null; + } - TestResults testResults = new 
TestResults(); - testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); - testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); - return testResults; - }); + TestResults testResults = new TestResults(); + testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); + testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); + return testResults; + }, + this.getClass().getSimpleName(), + "get"); } @Nullable - private com.linkedin.test.TestResults getTestResults(final Urn entityUrn, final QueryContext context) { + private com.linkedin.test.TestResults getTestResults( + final Urn entityUrn, final QueryContext context) { try { - final EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), - context.getAuthentication()); - if (entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { + final EntityResponse entityResponse = + _entityClient.getV2( + context.getOperationContext(), + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME)); + if (entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { return new com.linkedin.test.TestResults( - entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME) - .getValue() - .data()); + entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME).getValue().data()); } return null; } catch (Exception e) { - throw new RuntimeException("Failed to get test results", e); + throw new RuntimeException("Failed to get test results", e); } } - private List mapTestResults(final @Nonnull List gmsResults) { + private List mapTestResults( + final @Nonnull List gmsResults) { final List results = new ArrayList<>(); for (com.linkedin.test.TestResult gmsResult : gmsResults) { results.add(mapTestResult(gmsResult)); @@ -89,4 +91,4 @@ private 
TestResult mapTestResult(final @Nonnull com.linkedin.test.TestResult gms testResult.setType(TestResultType.valueOf(gmsResult.getType().toString())); return testResult; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java index 248da3e58d8aef..020064ed643c88 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + +import com.datahub.authorization.AuthUtil; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -12,19 +15,20 @@ import com.linkedin.test.TestDefinition; import com.linkedin.test.TestDefinitionType; import java.util.Map; -import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; - - public class TestUtils { - /** - * Returns true if the authenticated user is able to manage tests. - */ + /** Returns true if the authenticated user is able to view tests. */ + public static boolean canViewTests(@Nonnull QueryContext context) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_TESTS_PRIVILEGE); + } + + /** Returns true if the authenticated user is able to manage tests. 
*/ public static boolean canManageTests(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } public static TestDefinition mapDefinition(final TestDefinitionInput testDefInput) { @@ -38,11 +42,12 @@ public static EntityResponse buildEntityResponse(Map asp final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); for (Map.Entry entry : aspects.entrySet()) { - aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data()))); + aspectMap.put( + entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data()))); } entityResponse.setAspects(aspectMap); return entityResponse; } - private TestUtils() { } + private TestUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java index 1dd8518076796a..20973170f5686a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java @@ -1,9 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.entity.client.EntityClient; @@ -13,15 +19,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Updates or updates a Test. Requires the MANAGE_TESTS privilege. - */ +/** Updates or updates a Test. Requires the MANAGE_TESTS privilege. */ public class UpdateTestResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -35,26 +33,32 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (canManageTests(context)) { - final String urn = environment.getArgument("urn"); - final UpdateTestInput input = bindArgument(environment.getArgument("input"), UpdateTestInput.class); + final String urn = environment.getArgument("urn"); + final UpdateTestInput input = + bindArgument(environment.getArgument("input"), UpdateTestInput.class); - // Update the Test info - currently this simply creates a new test with same urn. - final TestInfo info = mapUpdateTestInput(input); + // Update the Test info - currently this simply creates a new test with same urn. 
+ final TestInfo info = mapUpdateTestInput(input); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info); - try { - return _entityClient.ingestProposal(proposal, authentication, false); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info); + try { + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private static TestInfo mapUpdateTestInput(final UpdateTestInput input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java index 499e7c9ac177d9..fd1179f91f3bdb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.GetSchemaBlameInput; import 
com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; import com.linkedin.datahub.graphql.types.timeline.mappers.SchemaBlameMapper; @@ -16,15 +19,13 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. TODO: Add tests for this resolver. */ @Slf4j -public class GetSchemaBlameResolver implements DataFetcher> { +public class GetSchemaBlameResolver + implements DataFetcher> { private final TimelineService _timelineService; public GetSchemaBlameResolver(TimelineService timelineService) { @@ -32,37 +33,39 @@ public GetSchemaBlameResolver(TimelineService timelineService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - final GetSchemaBlameInput input = bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final GetSchemaBlameInput input = + bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class); final String datasetUrnString = input.getDatasetUrn(); final long startTime = 0; final long endTime = 0; final String version = input.getVersion() == null ? 
null : input.getVersion(); - return CompletableFuture.supplyAsync(() -> { - try { - final Set changeCategorySet = Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); - final Urn datasetUrn = Urn.createFromString(datasetUrnString); - final List changeTransactionList = - _timelineService.getTimeline( - datasetUrn, - changeCategorySet, - startTime, - endTime, - null, - null, - false); - return SchemaBlameMapper.map(changeTransactionList, version); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Set changeCategorySet = + Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); + final Urn datasetUrn = Urn.createFromString(datasetUrnString); + final List changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaBlameMapper.map(changeTransactionList, version); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java index cfad1395a61a88..28f3c544f9d16f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListInput; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; import com.linkedin.datahub.graphql.types.timeline.mappers.SchemaVersionListMapper; @@ -16,14 +19,12 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. */ @Slf4j -public class GetSchemaVersionListResolver implements DataFetcher> { +public class GetSchemaVersionListResolver + implements DataFetcher> { private final TimelineService _timelineService; public GetSchemaVersionListResolver(TimelineService timelineService) { @@ -31,7 +32,8 @@ public GetSchemaVersionListResolver(TimelineService timelineService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final GetSchemaVersionListInput input = bindArgument(environment.getArgument("input"), GetSchemaVersionListInput.class); @@ -39,23 +41,29 @@ public CompletableFuture get(final DataFetchingEnvir final long startTime = 0; final long endTime = 0; - return CompletableFuture.supplyAsync(() -> { - try { - final Set changeCategorySet = new HashSet<>(); - changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); - Urn datasetUrn = Urn.createFromString(datasetUrnString); - List changeTransactionList = - _timelineService.getTimeline(datasetUrn, changeCategorySet, 
startTime, endTime, null, null, false); - return SchemaVersionListMapper.map(changeTransactionList); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final Set changeCategorySet = new HashSet<>(); + changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); + Urn datasetUrn = Urn.createFromString(datasetUrnString); + List changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaVersionListMapper.map(changeTransactionList); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java index 45998bdae45b04..14429696fefd42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java @@ -5,16 +5,18 @@ import graphql.schema.TypeResolver; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface + * type. 
*/ public class AspectInterfaceTypeResolver implements TypeResolver { - public AspectInterfaceTypeResolver() { } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this - // out in the case we ever want to return fields of type Aspect in graphql. Right now - // we just use Aspect to define the shared `version` field. - return null; - } + public AspectInterfaceTypeResolver() {} + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this + // out in the case we ever want to return fields of type Aspect in graphql. Right now + // we just use Aspect to define the shared `version` field. + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java index 1a5f06da040141..52c20254332b39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java @@ -6,27 +6,29 @@ import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - import java.util.List; import java.util.stream.Collectors; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface + * type. 
*/ public class EntityInterfaceTypeResolver implements TypeResolver { - private final List> _entities; + private final List> _entities; - public EntityInterfaceTypeResolver(final List> entities) { - _entities = entities; - } + public EntityInterfaceTypeResolver(final List> entities) { + _entities = entities; + } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - Object javaObject = env.getObject(); - final LoadableType filteredEntity = Iterables.getOnlyElement(_entities.stream() + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + Object javaObject = env.getObject(); + final LoadableType filteredEntity = + Iterables.getOnlyElement( + _entities.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); - } + return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java index a69500f24ee24a..aeeb9bafa1f4c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java @@ -4,30 +4,30 @@ import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class HyperParameterValueTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; - public static 
final String INT_BOX = "IntBox"; - public static final String FLOAT_BOX = "FloatBox"; - public static final String BOOLEAN_BOX = "BooleanBox"; + public static final String STRING_BOX = "StringBox"; + public static final String INT_BOX = "IntBox"; + public static final String FLOAT_BOX = "FloatBox"; + public static final String BOOLEAN_BOX = "BooleanBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else if (env.getObject() instanceof IntBox) { - return env.getSchema().getObjectType(INT_BOX); - } else if (env.getObject() instanceof BooleanBox) { - return env.getSchema().getObjectType(BOOLEAN_BOX); - } else if (env.getObject() instanceof FloatBox) { - return env.getSchema().getObjectType(FLOAT_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else if (env.getObject() instanceof IntBox) { + return env.getSchema().getObjectType(INT_BOX); + } else if (env.getObject() instanceof BooleanBox) { + return env.getSchema().getObjectType(BOOLEAN_BOX); + } else if (env.getObject() instanceof FloatBox) { + return env.getSchema().getObjectType(FLOAT_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java index 25a9a540f51b18..ff190cff1339e6 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java @@ -8,17 +8,17 @@ public class PlatformSchemaUnionTypeResolver implements TypeResolver { - private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; - private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; + private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; + private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TableSchema) { - return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); - } else if (env.getObject() instanceof KeyValueSchema) { - return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TableSchema) { + return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); + } else if (env.getObject() instanceof KeyValueSchema) { + return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); + } else { + throw new RuntimeException("Unrecognized object type provided to type resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java new file mode 100644 index 00000000000000..cb0d24839056dc --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PropertyValueResolver.java @@ -0,0 +1,25 @@ +package com.linkedin.datahub.graphql.resolvers.type; + +import 
com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import graphql.TypeResolutionEnvironment; +import graphql.schema.GraphQLObjectType; +import graphql.schema.TypeResolver; + +public class PropertyValueResolver implements TypeResolver { + + public static final String STRING_VALUE = "StringValue"; + public static final String NUMBER_VALUE = "NumberValue"; + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringValue) { + return env.getSchema().getObjectType(STRING_VALUE); + } else if (env.getObject() instanceof NumberValue) { + return env.getSchema().getObjectType(NUMBER_VALUE); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java index 0dc7b0485c51cf..c5be5725f1d45c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.type; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class ResultsTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; + public static final String STRING_BOX = "StringBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else { - throw new RuntimeException("Unrecognized object 
type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java index 5263e6b9b7df6f..c66588008b1030 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java @@ -6,7 +6,8 @@ public class TimeSeriesAspectInterfaceTypeResolver implements TypeResolver { - public TimeSeriesAspectInterfaceTypeResolver() { } + public TimeSeriesAspectInterfaceTypeResolver() {} + @Override public GraphQLObjectType getType(TypeResolutionEnvironment env) { // TODO(John): Fill this out. 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java index d02f1a5f786a74..5ffab37c3408ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateNativeUserResetTokenInput; import com.linkedin.datahub.graphql.generated.ResetToken; @@ -10,14 +14,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Resolver responsible for creating a password reset token that Admins can share with native users to reset their - * credentials. + * Resolver responsible for creating a password reset token that Admins can share with native users + * to reset their credentials. 
*/ -public class CreateNativeUserResetTokenResolver implements DataFetcher> { +public class CreateNativeUserResetTokenResolver + implements DataFetcher> { private final NativeUserService _nativeUserService; public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserService) { @@ -25,7 +27,8 @@ public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserServ } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateNativeUserResetTokenInput input = bindArgument(environment.getArgument("input"), CreateNativeUserResetTokenInput.class); @@ -38,15 +41,20 @@ public CompletableFuture get(final DataFetchingEnvironment environme "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - String resetToken = - _nativeUserService.generateNativeUserPasswordResetToken(userUrnString, context.getAuthentication()); - return new ResetToken(resetToken); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to generate password reset token for user: %s", userUrnString)); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + String resetToken = + _nativeUserService.generateNativeUserPasswordResetToken( + context.getOperationContext(), userUrnString); + return new ResetToken(resetToken); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to generate password reset token for user: %s", userUrnString)); + } + }, + this.getClass().getSimpleName(), + "get"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 69da642ad6bb18..2445ff3130ba93 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.ListUsersInput; @@ -10,7 +14,6 @@ import com.linkedin.datahub.graphql.types.corpuser.mappers.CorpUserMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; @@ -22,10 +25,7 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class ListUsersResolver implements DataFetcher> { @@ -40,48 +40,64 @@ public ListUsersResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = 
environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListUsersInput input = bindArgument(environment.getArgument("input"), ListUsersInput.class); + final ListUsersInput input = + bindArgument(environment.getArgument("input"), ListUsersInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all policy Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + // First, get all policy Urns. + final SearchResult gmsResult = + _entityClient.search( + context + .getOperationContext() + .withSearchFlags(flags -> flags.setFulltext(true)), + CORP_USER_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count); - // Then, get hydrate all users. - final Map entities = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()) - ), null, context.getAuthentication()); + // Then, get hydrate all users. + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + CORP_USER_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null); - // Now that we have entities we can bind this to a result. 
- final ListUsersResult result = new ListUsersResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setUsers(mapEntities(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list users", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListUsersResult result = new ListUsersResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setUsers(mapEntities(context, entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list users", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private List mapEntities(final Collection entities) { - return entities.stream() - .map(CorpUserMapper::map) - .collect(Collectors.toList()); + private static List mapEntities( + @Nullable QueryContext context, final Collection entities) { + return entities.stream().map(e -> CorpUserMapper.map(context, e)).collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java index 718810e4710e7e..7fa8bf7333f5fd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java @@ -3,6 +3,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; @@ -10,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp User - */ +/** Resolver responsible for hard deleting a particular DataHub Corp User */ @Slf4j public class RemoveUserResolver implements DataFetcher> { @@ -24,30 +22,41 @@ public RemoveUserResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { 
final String userUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(userUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return GraphQLConcurrencyUtils.supplyAsync( + () -> { try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(context.getOperationContext(), urn); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(context.getOperationContext(), urn); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for user with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against user with urn %s", userUrn), e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against user with urn %s", userUrn), e); - } - }); + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java index ab04d26fb5801f..88130ade61a676 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java @@ -1,10 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.entity.client.EntityClient; @@ -13,12 +17,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform privilege. + * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform + * privilege. 
*/ public class UpdateUserStatusResolver implements DataFetcher> { @@ -37,20 +38,30 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final CorpUserStatus newStatus = CorpUserStatus.valueOf(environment.getArgument("status")); // Create ths status aspect - final com.linkedin.identity.CorpUserStatus statusAspect = new com.linkedin.identity.CorpUserStatus(); + final com.linkedin.identity.CorpUserStatus statusAspect = + new com.linkedin.identity.CorpUserStatus(); statusAspect.setStatus(newStatus.toString()); - statusAspect.setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(Urn.createFromString(context.getActorUrn()))); - - return CompletableFuture.supplyAsync(() -> { - try { - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(userUrn), - CORP_USER_STATUS_ASPECT_NAME, statusAspect); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update user status for urn", userUrn), e); - } - }); + statusAspect.setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(userUrn), CORP_USER_STATUS_ASPECT_NAME, statusAspect); + return _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update user status for urn", userUrn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java index 6e39879dd56bcc..7c3e433dd1ede4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateViewInput; import com.linkedin.datahub.graphql.generated.DataHubView; @@ -18,12 +21,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class CreateViewResolver implements DataFetcher> { @@ -34,29 +32,38 @@ public CreateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateViewInput input = bindArgument(environment.getArgument("input"), CreateViewInput.class); + final CreateViewInput input = + bindArgument(environment.getArgument("input"), CreateViewInput.class); - return 
CompletableFuture.supplyAsync(() -> { - if (ViewUtils.canCreateView( - DataHubViewType.valueOf(input.getViewType().toString()), - context)) { - try { - final Urn urn = _viewService.createView( - DataHubViewType.valueOf(input.getViewType().toString()), - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - return createView(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create View with input: %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + if (ViewUtils.canCreateView( + DataHubViewType.valueOf(input.getViewType().toString()), context)) { + try { + final Urn urn = + _viewService.createView( + context.getOperationContext(), + DataHubViewType.valueOf(input.getViewType().toString()), + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition( + input.getDefinition(), + context.getOperationContext().getAspectRetriever()), + System.currentTimeMillis()); + return createView(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create View with input: %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }, + this.getClass().getSimpleName(), + "get"); } private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateViewInput input) { @@ -66,15 +73,20 @@ private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateView .setViewType(input.getViewType()) .setName(input.getName()) .setDescription(input.getDescription()) - .setDefinition(new DataHubViewDefinition( - input.getDefinition().getEntityTypes(), - new DataHubViewFilter( - input.getDefinition().getFilter().getOperator(), - input.getDefinition().getFilter().getFilters().stream().map(filterInput -> - new FacetFilter(filterInput.getField(), filterInput.getCondition(), - filterInput.getValues(), - filterInput.getNegated())) - .collect(Collectors.toList())))) + .setDefinition( + new DataHubViewDefinition( + input.getDefinition().getEntityTypes(), + new DataHubViewFilter( + input.getDefinition().getFilter().getOperator(), + input.getDefinition().getFilter().getFilters().stream() + .map( + filterInput -> + new FacetFilter( + filterInput.getField(), + filterInput.getCondition(), + filterInput.getValues(), + filterInput.getNegated())) + .collect(Collectors.toList())))) .build(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java index 2b8c3b8640aa88..974b6f37203354 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java @@ -2,6 +2,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import 
com.linkedin.metadata.service.ViewService; import graphql.schema.DataFetcher; @@ -11,10 +12,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub View - */ +/** Resolver responsible for hard deleting a particular DataHub View */ @Slf4j public class DeleteViewResolver implements DataFetcher> { @@ -25,24 +23,29 @@ public DeleteViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.deleteView(urn, context.getAuthentication()); - log.info(String.format("Successfully deleted View %s with urn", urn)); - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against View with urn %s", urn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.deleteView(context.getOperationContext(), urn); + log.info(String.format("Successfully deleted View %s with urn", urn)); + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against View with urn %s", urn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index 51bbcfcfa25ae4..265f4d5f5d56e2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewType; @@ -13,7 +16,7 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -28,22 +31,17 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing global DataHub Views. - */ +/** Resolver used for listing global DataHub Views. */ @Slf4j public class ListGlobalViewsResolver implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -55,43 +53,51 @@ public ListGlobalViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListGlobalViewsInput input = bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); + final ListGlobalViewsInput input = + bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery(); - try { + try { - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.DATAHUB_VIEW_ENTITY_NAME, + query, + buildFilters(context.getOperationContext().getAspectRetriever()), + Collections.singletonList(DEFAULT_SORT_CRITERION), + start, + count); - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list global Views", e); - } - }); + final ListViewsResult result = new ListViewsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setViews( + mapUnresolvedViews( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list global Views", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial View objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedViews(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { @@ -103,12 +109,17 @@ private List mapUnresolvedViews(final List entityUrns) { return results; } - private Filter buildFilters() { + private Filter buildFilters(@Nullable AspectRetriever aspectRetriever) { final AndFilterInput globalCriteria = new AndFilterInput(); List andConditions = new ArrayList<>(); andConditions.add( - new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(DataHubViewType.GLOBAL.name()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + VIEW_TYPE_FIELD, + null, + ImmutableList.of(DataHubViewType.GLOBAL.name()), + false, + FilterOperator.EQUAL)); globalCriteria.setAnd(andConditions); - return buildFilter(Collections.emptyList(), ImmutableList.of(globalCriteria)); + return buildFilter(Collections.emptyList(), ImmutableList.of(globalCriteria), aspectRetriever); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java index d8705e216503cc..abfdeb2d608693 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.generated.AndFilterInput; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.EntityType; @@ -12,7 +15,7 @@ import 
com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -30,21 +33,15 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing the current user's DataHub Views. - */ +/** Resolver used for listing the current user's DataHub Views. */ @Slf4j public class ListMyViewsResolver implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; private static final String CREATOR_URN_FIELD = "createdBy"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -56,44 +53,56 @@ public ListMyViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListMyViewsInput input = bindArgument(environment.getArgument("input"), ListMyViewsInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? 
DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final String viewType = input.getViewType() == null ? null : input.getViewType().toString(); - - try { - - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(viewType, context.getActorUrn()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Views", e); - } - }); + final ListMyViewsInput input = + bindArgument(environment.getArgument("input"), ListMyViewsInput.class); + + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final String viewType = + input.getViewType() == null ? 
null : input.getViewType().toString(); + + try { + + final SearchResult gmsResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.DATAHUB_VIEW_ENTITY_NAME, + query, + buildFilters( + viewType, + context.getActorUrn(), + context.getOperationContext().getAspectRetriever()), + Collections.singletonList(DEFAULT_SORT_CRITERION), + start, + count); + + final ListViewsResult result = new ListViewsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setViews( + mapUnresolvedViews( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Views", e); + } + }, + this.getClass().getSimpleName(), + "get"); } - // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial View objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedViews(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { @@ -105,23 +114,24 @@ private List mapUnresolvedViews(final List entityUrns) { return results; } - private Filter buildFilters(@Nullable final String viewType, final String creatorUrn) { + private Filter buildFilters( + @Nullable final String viewType, + final String creatorUrn, + @Nullable AspectRetriever aspectRetriever) { // And GLOBAL views for the authenticated actor. 
final AndFilterInput filterCriteria = new AndFilterInput(); final List andConditions = new ArrayList<>(); andConditions.add( - new FacetFilterInput(CREATOR_URN_FIELD, - null, - ImmutableList.of(creatorUrn), - false, - FilterOperator.EQUAL)); + new FacetFilterInput( + CREATOR_URN_FIELD, null, ImmutableList.of(creatorUrn), false, FilterOperator.EQUAL)); if (viewType != null) { andConditions.add( - new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL)); + new FacetFilterInput( + VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL)); } filterCriteria.setAnd(andConditions); // Currently, there is no way to fetch the views belonging to another user. - return buildFilter(Collections.emptyList(), ImmutableList.of(filterCriteria)); + return buildFilter(Collections.emptyList(), ImmutableList.of(filterCriteria), aspectRetriever); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java index 61e22da3c94447..11ec1c5705bd31 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java @@ -1,8 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.UpdateViewInput; @@ -14,13 +17,10 @@ import java.util.Objects; import 
java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class UpdateViewResolver implements DataFetcher> { @@ -31,41 +31,54 @@ public UpdateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); - final UpdateViewInput input = bindArgument(environment.getArgument("input"), UpdateViewInput.class); + final UpdateViewInput input = + bindArgument(environment.getArgument("input"), UpdateViewInput.class); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.updateView( - urn, - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - log.info(String.format("Successfully updated View %s with urn", urn)); - return getView(urn, context.getAuthentication()); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.updateView( + context.getOperationContext(), + urn, + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition( + input.getDefinition(), context.getOperationContext().getAspectRetriever()), + System.currentTimeMillis()); + log.info(String.format("Successfully updated View %s with urn", urn)); + return getView(context, urn, context.getAuthentication()); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }, + this.getClass().getSimpleName(), + "get"); } - private DataHubView getView(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { - final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication); + private DataHubView getView( + @Nullable QueryContext context, + @Nonnull final Urn urn, + @Nonnull final Authentication authentication) { + final EntityResponse maybeResponse = + _viewService.getViewEntityResponse(context.getOperationContext(), urn); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); + String.format( + "Failed to perform update to View with urn %s. 
Failed to find view in GMS.", urn)); } - return DataHubViewMapper.map(maybeResponse); + return DataHubViewMapper.map(context, maybeResponse); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index dda0c3bebc2ebe..70a5ced4bfbf10 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -11,8 +11,9 @@ import com.linkedin.datahub.graphql.generated.DataHubViewFilterInput; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.CriterionArray; @@ -25,58 +26,64 @@ import java.util.Objects; import java.util.stream.Collectors; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class ViewUtils { /** * Returns true if the authenticated actor is allowed to create a view with the given parameters. * - * The user can create a View if it's a personal View specific to them, or - * if it's a Global view and they have the correct Platform privileges. + *

The user can create a View if it's a personal View specific to them, or if it's a Global + * view and they have the correct Platform privileges. * * @param type the type of the new View * @param context the current GraphQL {@link QueryContext} - * @return true if the authenticator actor is allowed to change or delete the view, false otherwise. + * @return true if the authenticator actor is allowed to change or delete the view, false + * otherwise. */ public static boolean canCreateView( - @Nonnull DataHubViewType type, - @Nonnull QueryContext context) { + @Nonnull DataHubViewType type, @Nonnull QueryContext context) { Objects.requireNonNull(type, "type must not be null"); Objects.requireNonNull(context, "context must not be null"); return DataHubViewType.PERSONAL.equals(type) - || (DataHubViewType.GLOBAL.equals(type) && AuthorizationUtils.canManageGlobalViews(context)); + || (DataHubViewType.GLOBAL.equals(type) + && AuthorizationUtils.canManageGlobalViews(context)); } - /** - * Returns true if the authenticated actor is allowed to update or delete - * the View with the specified urn. + * Returns true if the authenticated actor is allowed to update or delete the View with the + * specified urn. * * @param viewService an instance of {@link ViewService} * @param viewUrn the urn of the View * @param context the current GraphQL {@link QueryContext} - * @return true if the authenticator actor is allowed to change or delete the view, false otherwise. + * @return true if the authenticator actor is allowed to change or delete the view, false + * otherwise. 
*/ - public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) { + public static boolean canUpdateView( + @Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) { Objects.requireNonNull(viewService, "viewService must not be null"); Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(context, "context must not be null"); // Retrieve the view, determine it's type, and then go from there. - final DataHubViewInfo viewInfo = viewService.getViewInfo(viewUrn, context.getAuthentication()); + final DataHubViewInfo viewInfo = + viewService.getViewInfo(context.getOperationContext(), viewUrn); if (viewInfo == null) { - throw new IllegalArgumentException(String.format("Failed to modify View. View with urn %s does not exist.", viewUrn)); + throw new IllegalArgumentException( + String.format("Failed to modify View. View with urn %s does not exist.", viewUrn)); } - // If the View is Global, then the user must have ability to manage global views OR must be its owner - if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) && AuthorizationUtils.canManageGlobalViews(context)) { + // If the View is Global, then the user must have ability to manage global views OR must be its + // owner + if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) + && AuthorizationUtils.canManageGlobalViews(context)) { return true; } // If the View is Personal, then the current actor must be the owner. 
- return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getActorUrn())); } /** @@ -86,54 +93,74 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U * @return the GMS model */ @Nonnull - public static DataHubViewDefinition mapDefinition(@Nonnull final DataHubViewDefinitionInput input) { + public static DataHubViewDefinition mapDefinition( + @Nonnull final DataHubViewDefinitionInput input, @Nullable AspectRetriever aspectRetriever) { Objects.requireNonNull(input, "input must not be null"); final DataHubViewDefinition result = new DataHubViewDefinition(); if (input.getFilter() != null) { - result.setFilter(mapFilter(input.getFilter()), SetMode.IGNORE_NULL); + result.setFilter(mapFilter(input.getFilter(), aspectRetriever), SetMode.IGNORE_NULL); } - result.setEntityTypes(new StringArray(input.getEntityTypes().stream().map(EntityTypeMapper::getName).collect( - Collectors.toList()))); + result.setEntityTypes( + new StringArray( + input.getEntityTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()))); return result; } /** - * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} object, - * which is then persisted to the backend in an aspect. + * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} + * object, which is then persisted to the backend in an aspect. * - * We intentionally convert from a more rigid model to something more flexible to hedge for the case - * in which the views feature evolves to require more advanced filter capabilities. + *

We intentionally convert from a more rigid model to something more flexible to hedge for the + * case in which the views feature evolves to require more advanced filter capabilities. * - * The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), which cannot be - * rendered in full by the UI. We account for this on the read path by logging a warning and returning an empty - * View in such cases. + *

The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), + * which cannot be rendered in full by the UI. We account for this on the read path by logging a + * warning and returning an empty View in such cases. */ - private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { + private static Filter mapFilter( + @Nonnull DataHubViewFilterInput input, @Nullable AspectRetriever aspectRetriever) { if (LogicalOperator.AND.equals(input.getOperator())) { // AND - return buildAndFilter(input.getFilters()); + return buildAndFilter(input.getFilters(), aspectRetriever); } else { // OR - return buildOrFilter(input.getFilters()); + return buildOrFilter(input.getFilters(), aspectRetriever); } } - private static Filter buildAndFilter(@Nonnull List input) { + private static Filter buildAndFilter( + @Nonnull List input, @Nullable AspectRetriever aspectRetriever) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(input.stream().map(ResolverUtils::criterionFromFilter).collect(Collectors.toList())))) - )); + result.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + input.stream() + .map(f -> ResolverUtils.criterionFromFilter(f, aspectRetriever)) + .collect(Collectors.toList())))))); return result; } - private static Filter buildOrFilter(@Nonnull List input) { + private static Filter buildOrFilter( + @Nonnull List input, @Nullable AspectRetriever aspectRetriever) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(input.stream().map(filter -> - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(ResolverUtils.criterionFromFilter(filter)))) - ) - .collect(Collectors.toList()))); + result.setOr( + new ConjunctiveCriterionArray( + input.stream() + .map( + filter -> + new ConjunctiveCriterion() + .setAnd( + new 
CriterionArray( + ImmutableList.of( + ResolverUtils.criterionFromFilter( + filter, aspectRetriever))))) + .collect(Collectors.toList()))); return result; } @@ -141,6 +168,5 @@ private static boolean isViewOwner(Urn creatorUrn, Urn actorUrn) { return creatorUrn.equals(actorUrn); } - private ViewUtils() { } - + private ViewUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java index 51fd503fff5784..49c8c24c2b6be2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java @@ -1,4 +1,3 @@ package com.linkedin.datahub.graphql.scalar; -public class LongScalarType { -} +public class LongScalarType {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java index 3bd8719a37abc4..df7c729cb14c1d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java @@ -1,16 +1,18 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface BatchMutableType extends MutableType { - default Class batchInputClass() throws UnsupportedOperationException { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchInputClass method"); - } + default Class batchInputClass() throws UnsupportedOperationException { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchInputClass method"); + } - default 
List batchUpdate(@Nonnull final B[] updateInput, QueryContext context) throws Exception { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchUpdate method"); - } + default List batchUpdate(@Nonnull final B[] updateInput, QueryContext context) + throws Exception { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchUpdate method"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java index b50a229be0633b..368c126131af26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java @@ -5,42 +5,46 @@ import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.FacetFilterInput; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** * Extension of {@link EntityType} containing methods required for 'browse' functionality. * - * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. * @param the key type for the DataLoader */ public interface BrowsableEntityType extends EntityType { - /** - * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count. 
- * - * @param path the path to find browse results under - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Nonnull - BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & count. - * - * @param urn the entity urn to fetch browse paths for - * @param context the {@link QueryContext} corresponding to the request. - */ - @Nonnull - List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception; + /** + * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count. + * + * @param path the path to find browse results under + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param start the initial offset of returned results + * @param count the number of results to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + @Nonnull + BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception; + /** + * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & + * count. + * + * @param urn the entity urn to fetch browse paths for + * @param context the {@link QueryContext} corresponding to the request. 
+ */ + @Nonnull + List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java index 4185288776c065..43e4c1be55b71c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java @@ -3,20 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import java.util.function.Function; - /** - * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, etc.). + * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, + * etc.). * * @param : The GraphQL object type corresponding to the entity, must be of type {@link Entity} * @param the key type for the DataLoader */ public interface EntityType extends LoadableType { - /** - * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the Graph type, eg. 'DATASET' - */ - com.linkedin.datahub.graphql.generated.EntityType type(); - - Function getKeyProvider(); + /** + * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the + * Graph type, eg. 
'DATASET' + */ + com.linkedin.datahub.graphql.generated.EntityType type(); + Function getKeyProvider(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java index a21fab09b79c36..9f9fe1f28994c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java @@ -2,10 +2,9 @@ import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; - import graphql.execution.DataFetcherResult; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * GQL graph type that can be loaded from a downstream service by primary key. @@ -15,35 +14,38 @@ */ public interface LoadableType { - /** - * Returns generated GraphQL class associated with the type - */ - Class objectClass(); - - /** - * Returns the name of the type, to be used in creating a corresponding GraphQL {@link org.dataloader.DataLoader} - */ - default String name() { - return objectClass().getSimpleName(); - } - - /** - * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity cannot be found. - * - * @param key to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - default DataFetcherResult load(@Nonnull final K key, @Nonnull final QueryContext context) throws Exception { - return batchLoad(ImmutableList.of(key), context).get(0); - }; - - /** - * Retrieves an list of entities given a list of urn strings. The list returned is expected to - * be of same length of the list of urns, where nulls are provided in place of an entity object if an entity cannot be found. - * - * @param keys to retrieve - * @param context the {@link QueryContext} corresponding to the request. 
- */ - List> batchLoad(@Nonnull final List keys, @Nonnull final QueryContext context) throws Exception; - + /** Returns generated GraphQL class associated with the type */ + Class objectClass(); + + /** + * Returns the name of the type, to be used in creating a corresponding GraphQL {@link + * org.dataloader.DataLoader} + */ + default String name() { + return objectClass().getSimpleName(); + } + + /** + * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity + * cannot be found. + * + * @param key to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + default DataFetcherResult load(@Nonnull final K key, @Nonnull final QueryContext context) + throws Exception { + return batchLoad(ImmutableList.of(key), context).get(0); + } + ; + + /** + * Retrieves an list of entities given a list of urn strings. The list returned is expected to be + * of same length of the list of urns, where nulls are provided in place of an entity object if an + * entity cannot be found. + * + * @param keys to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + List> batchLoad( + @Nonnull final List keys, @Nonnull final QueryContext context) throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java index 94f1200d3a7833..fa241929133241 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java @@ -1,7 +1,6 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - import javax.annotation.Nonnull; /** @@ -10,18 +9,16 @@ * @param : The input type corresponding to the write. 
*/ public interface MutableType { - /** - * Returns generated GraphQL class associated with the input type - */ - - Class inputClass(); + /** Returns generated GraphQL class associated with the input type */ + Class inputClass(); - /** - * Update an entity by urn - * - * @param urn - * @param input input type - * @param context the {@link QueryContext} corresponding to the request. - */ - T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) throws Exception; + /** + * Update an entity by urn + * + * @param urn + * @param input input type + * @param context the {@link QueryContext} corresponding to the request. + */ + T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java index 96875956d22c10..a5ade054e71ebb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java @@ -6,52 +6,61 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.query.filter.Filter; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. * - * Extension of {@link EntityType} containing methods required for 'search' functionality. + *

Extension of {@link EntityType} containing methods required for 'search' functionality. * - * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. */ @Deprecated public interface SearchableEntityType extends EntityType { - /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. - * - * Retrieves {@link SearchResults} corresponding to a given query string, list of filters, start index, & count. - * - * @param query query text - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Deprecated - SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of filters, & limit. - * - * @param query query text - * @param field the name of the field to autocomplete against, null if one was not provided - * @param filters list of filters that should be applied to search results, null if non were provided - * @param limit the maximum number of autocomplete suggestions to be returned - * @param context the {@link QueryContext} corresponding to the request. - */ - AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception; + /** + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. + * + *

Retrieves {@link SearchResults} corresponding to a given query string, list of filters, + * start index, & count. + * + * @param query query text + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param start the initial offset of returned results + * @param count the number of results to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + @Deprecated + SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception; + /** + * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of + * filters, & limit. + * + * @param query query text + * @param field the name of the field to autocomplete against, null if one was not provided + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param limit the maximum number of autocomplete suggestions to be returned + * @param context the {@link QueryContext} corresponding to the request. 
+ */ + AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java index c9e2c322ace8df..6e4259dde18c34 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java @@ -1,24 +1,31 @@ package com.linkedin.datahub.graphql.types.aspect; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Aspect; import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaMetadataMapper; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class AspectMapper { public static final AspectMapper INSTANCE = new AspectMapper(); - public static Aspect map(@Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(aspect, entityUrn); + public static Aspect map( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, aspect, entityUrn); } - public Aspect apply(@Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + public Aspect apply( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { if (Constants.SCHEMA_METADATA_ASPECT_NAME.equals(aspect.getName())) { - return SchemaMetadataMapper.map(aspect, entityUrn); + return SchemaMetadataMapper.map(context, aspect, entityUrn); } return null; } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java index f3fdfdaa86f9e3..fbfdb500ecfb89 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.aspect; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.VersionedAspectKey; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.VersionedAspectKey; import com.linkedin.datahub.graphql.generated.Aspect; import com.linkedin.datahub.graphql.types.LoadableType; import com.linkedin.entity.EntityResponse; @@ -35,45 +37,58 @@ public String name() { } /** - * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list returned is expected to - * be of same length of the list of keys, where nulls are provided in place of an aspect object if an entity cannot be found. + * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list + * returned is expected to be of same length of the list of keys, where nulls are provided in + * place of an aspect object if an entity cannot be found. * * @param keys to retrieve * @param context the {@link QueryContext} corresponding to the request. 
*/ - public List> batchLoad(@Nonnull List keys, @Nonnull QueryContext context) throws Exception { + public List> batchLoad( + @Nonnull List keys, @Nonnull QueryContext context) throws Exception { try { - return keys.stream().map(key -> { - try { - Urn entityUrn = Urn.createFromString(key.getUrn()); - Map response = _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication() - ); + return keys.stream() + .map( + key -> { + try { + Urn entityUrn = Urn.createFromString(key.getUrn()); + + Map response = + canView(context.getOperationContext(), entityUrn) + ? _entityClient.batchGetV2( + context.getOperationContext(), + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName())) + : Map.of(); - EntityResponse entityResponse = response.get(entityUrn); + EntityResponse entityResponse = response.get(entityUrn); - if (entityResponse == null || entityResponse.getAspects().get(key.getAspectName()) == null) { - // The aspect was not found. Return null. - return DataFetcherResult.newResult().data(null).build(); - } - final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); - return DataFetcherResult.newResult().data(AspectMapper.map(aspect, entityUrn)).build(); - } catch (Exception e) { - if (e instanceof RestLiResponseException) { - // if no aspect is found, restli will return a 404 rather than null - // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls - if (((RestLiResponseException) e).getStatus() == 404) { - return DataFetcherResult.newResult().data(null).build(); - } - } - throw new RuntimeException(String.format("Failed to load Aspect for entity %s", key.getUrn()), e); - } - }).collect(Collectors.toList()); + if (entityResponse == null + || entityResponse.getAspects().get(key.getAspectName()) == null) { + // The aspect was not found. Return null. 
+ return DataFetcherResult.newResult().data(null).build(); + } + final EnvelopedAspect aspect = + entityResponse.getAspects().get(key.getAspectName()); + return DataFetcherResult.newResult() + .data(AspectMapper.map(context, aspect, entityUrn)) + .build(); + } catch (Exception e) { + if (e instanceof RestLiResponseException) { + // if no aspect is found, restli will return a 404 rather than null + // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls + if (((RestLiResponseException) e).getStatus() == 404) { + return DataFetcherResult.newResult().data(null).build(); + } + } + throw new RuntimeException( + String.format("Failed to load Aspect for entity %s", key.getUrn()), e); + } + }) + .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Aspects", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index e1d81bb31f4712..a5f6cadb41566e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -1,34 +1,59 @@ package com.linkedin.datahub.graphql.types.assertion; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; + +import com.linkedin.assertion.AssertionAction; +import com.linkedin.assertion.AssertionActions; import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.GlobalTags; +import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; +import com.linkedin.datahub.graphql.generated.AssertionActionType; +import 
com.linkedin.datahub.graphql.generated.AssertionSource; +import com.linkedin.datahub.graphql.generated.AssertionSourceType; import com.linkedin.datahub.graphql.generated.AssertionStdAggregation; import com.linkedin.datahub.graphql.generated.AssertionStdOperator; import com.linkedin.datahub.graphql.generated.AssertionStdParameter; import com.linkedin.datahub.graphql.generated.AssertionStdParameterType; import com.linkedin.datahub.graphql.generated.AssertionStdParameters; import com.linkedin.datahub.graphql.generated.AssertionType; +import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.CustomAssertionInfo; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DatasetAssertionInfo; import com.linkedin.datahub.graphql.generated.DatasetAssertionScope; +import com.linkedin.datahub.graphql.generated.DateInterval; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FieldAssertionInfo; +import com.linkedin.datahub.graphql.generated.FixedIntervalSchedule; +import com.linkedin.datahub.graphql.generated.FreshnessAssertionInfo; +import com.linkedin.datahub.graphql.generated.SchemaAssertionCompatibility; +import com.linkedin.datahub.graphql.generated.SchemaAssertionField; +import com.linkedin.datahub.graphql.generated.SchemaAssertionInfo; import com.linkedin.datahub.graphql.generated.SchemaFieldRef; +import com.linkedin.datahub.graphql.generated.SqlAssertionInfo; +import com.linkedin.datahub.graphql.generated.VolumeAssertionInfo; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.StringMapMapper; +import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaFieldMapper; +import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaMetadataMapper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import 
com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; +import com.linkedin.schema.SchemaField; import java.util.Collections; import java.util.stream.Collectors; - +import javax.annotation.Nullable; public class AssertionMapper { - public static Assertion map(final EntityResponse entityResponse) { + public static Assertion map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Assertion result = new Assertion(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -36,58 +61,175 @@ public static Assertion map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.ASSERTION); - final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); + final EnvelopedAspect envelopedAssertionInfo = + aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { - result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); + result.setInfo( + mapAssertionInfo(context, new AssertionInfo(envelopedAssertionInfo.getValue().data()))); + } + + final EnvelopedAspect envelopedAssertionActions = + aspects.get(Constants.ASSERTION_ACTIONS_ASPECT_NAME); + if (envelopedAssertionActions != null) { + result.setActions( + mapAssertionActions(new AssertionActions(envelopedAssertionActions.getValue().data()))); } - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - 
result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } + final EnvelopedAspect envelopedStatus = aspects.get(Constants.STATUS_ASPECT_NAME); + if (envelopedStatus != null) { + result.setStatus(mapStatus(new Status(envelopedStatus.getValue().data()))); + } + + final EnvelopedAspect envelopedTags = aspects.get(GLOBAL_TAGS_ASPECT_NAME); + if (envelopedTags != null) { + result.setTags( + GlobalTagsMapper.map( + context, new GlobalTags(envelopedTags.getValue().data()), entityUrn)); + } + return result; } - private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertionInfo( - final AssertionInfo gmsAssertionInfo) { + private static com.linkedin.datahub.graphql.generated.Status mapStatus(Status status) { + final com.linkedin.datahub.graphql.generated.Status result = + new com.linkedin.datahub.graphql.generated.Status(); + result.setRemoved(status.isRemoved()); + return result; + } + + public static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertionInfo( + @Nullable QueryContext context, final AssertionInfo gmsAssertionInfo) { final com.linkedin.datahub.graphql.generated.AssertionInfo assertionInfo = new com.linkedin.datahub.graphql.generated.AssertionInfo(); assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); + + if (gmsAssertionInfo.hasLastUpdated()) { + assertionInfo.setLastUpdated( + new AuditStamp( + gmsAssertionInfo.getLastUpdated().getTime(), + gmsAssertionInfo.getLastUpdated().getActor().toString())); + } if (gmsAssertionInfo.hasDatasetAssertion()) { - DatasetAssertionInfo datasetAssertion = mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + DatasetAssertionInfo 
datasetAssertion = + mapDatasetAssertionInfo(context, gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } + // Description + if (gmsAssertionInfo.hasDescription()) { + assertionInfo.setDescription(gmsAssertionInfo.getDescription()); + } + // FRESHNESS Assertions + if (gmsAssertionInfo.hasFreshnessAssertion()) { + FreshnessAssertionInfo freshnessAssertionInfo = + FreshnessAssertionMapper.mapFreshnessAssertionInfo( + context, gmsAssertionInfo.getFreshnessAssertion()); + assertionInfo.setFreshnessAssertion(freshnessAssertionInfo); + } + // VOLUME Assertions + if (gmsAssertionInfo.hasVolumeAssertion()) { + VolumeAssertionInfo volumeAssertionInfo = + VolumeAssertionMapper.mapVolumeAssertionInfo( + context, gmsAssertionInfo.getVolumeAssertion()); + assertionInfo.setVolumeAssertion(volumeAssertionInfo); + } + // SQL Assertions + if (gmsAssertionInfo.hasSqlAssertion()) { + SqlAssertionInfo sqlAssertionInfo = + SqlAssertionMapper.mapSqlAssertionInfo(gmsAssertionInfo.getSqlAssertion()); + assertionInfo.setSqlAssertion(sqlAssertionInfo); + } + // FIELD Assertions + if (gmsAssertionInfo.hasFieldAssertion()) { + FieldAssertionInfo fieldAssertionInfo = + FieldAssertionMapper.mapFieldAssertionInfo(context, gmsAssertionInfo.getFieldAssertion()); + assertionInfo.setFieldAssertion(fieldAssertionInfo); + } + // SCHEMA Assertions + if (gmsAssertionInfo.hasSchemaAssertion()) { + SchemaAssertionInfo schemaAssertionInfo = + mapSchemaAssertionInfo(context, gmsAssertionInfo.getSchemaAssertion()); + assertionInfo.setSchemaAssertion(schemaAssertionInfo); + } + if (gmsAssertionInfo.hasCustomAssertion()) { + CustomAssertionInfo customAssertionInfo = + mapCustomAssertionInfo(context, gmsAssertionInfo.getCustomAssertion()); + assertionInfo.setCustomAssertion(customAssertionInfo); + } + + // Source Type + if (gmsAssertionInfo.hasSource()) { + assertionInfo.setSource(mapSource(gmsAssertionInfo.getSource())); + } + + if (gmsAssertionInfo.hasExternalUrl()) 
{ + assertionInfo.setExternalUrl(gmsAssertionInfo.getExternalUrl().toString()); + } return assertionInfo; } + private static com.linkedin.datahub.graphql.generated.AssertionActions mapAssertionActions( + final AssertionActions gmsAssertionActions) { + final com.linkedin.datahub.graphql.generated.AssertionActions result = + new com.linkedin.datahub.graphql.generated.AssertionActions(); + if (gmsAssertionActions.hasOnFailure()) { + result.setOnFailure( + gmsAssertionActions.getOnFailure().stream() + .map(AssertionMapper::mapAssertionAction) + .collect(Collectors.toList())); + } + if (gmsAssertionActions.hasOnSuccess()) { + result.setOnSuccess( + gmsAssertionActions.getOnSuccess().stream() + .map(AssertionMapper::mapAssertionAction) + .collect(Collectors.toList())); + } + return result; + } + + private static com.linkedin.datahub.graphql.generated.AssertionAction mapAssertionAction( + final AssertionAction gmsAssertionAction) { + final com.linkedin.datahub.graphql.generated.AssertionAction result = + new com.linkedin.datahub.graphql.generated.AssertionAction(); + result.setType(AssertionActionType.valueOf(gmsAssertionAction.getType().toString())); + return result; + } + private static DatasetAssertionInfo mapDatasetAssertionInfo( + @Nullable QueryContext context, final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { DatasetAssertionInfo datasetAssertion = new DatasetAssertionInfo(); - datasetAssertion.setDatasetUrn( - gmsDatasetAssertion.getDataset().toString()); - datasetAssertion.setScope( - DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); + datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); + datasetAssertion.setScope(DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); if (gmsDatasetAssertion.hasFields()) { - datasetAssertion.setFields(gmsDatasetAssertion.getFields() - .stream() - .map(AssertionMapper::mapDatasetSchemaField) - .collect(Collectors.toList())); + 
datasetAssertion.setFields( + gmsDatasetAssertion.getFields().stream() + .map(AssertionMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } else { datasetAssertion.setFields(Collections.emptyList()); } // Agg if (gmsDatasetAssertion.hasAggregation()) { - datasetAssertion.setAggregation(AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); + datasetAssertion.setAggregation( + AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); } // Op - datasetAssertion.setOperator(AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); + datasetAssertion.setOperator( + AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); // Params if (gmsDatasetAssertion.hasParameters()) { @@ -98,7 +240,8 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( datasetAssertion.setNativeType(gmsDatasetAssertion.getNativeType()); } if (gmsDatasetAssertion.hasNativeParameters()) { - datasetAssertion.setNativeParameters(StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + datasetAssertion.setNativeParameters( + StringMapMapper.map(context, gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } @@ -119,7 +262,8 @@ private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { return new SchemaFieldRef(schemaFieldUrn.toString(), schemaFieldUrn.getEntityKey().get(1)); } - private static AssertionStdParameters mapParameters(final com.linkedin.assertion.AssertionStdParameters params) { + protected static AssertionStdParameters mapParameters( + final com.linkedin.assertion.AssertionStdParameters params) { final AssertionStdParameters result = new AssertionStdParameters(); if (params.hasValue()) { result.setValue(mapParameter(params.getValue())); @@ -133,13 +277,85 @@ private static AssertionStdParameters mapParameters(final com.linkedin.assertion return result; } - private static AssertionStdParameter 
mapParameter(final com.linkedin.assertion.AssertionStdParameter param) { + private static AssertionStdParameter mapParameter( + final com.linkedin.assertion.AssertionStdParameter param) { final AssertionStdParameter result = new AssertionStdParameter(); result.setType(AssertionStdParameterType.valueOf(param.getType().name())); result.setValue(param.getValue()); return result; } - private AssertionMapper() { + protected static FixedIntervalSchedule mapFixedIntervalSchedule( + com.linkedin.assertion.FixedIntervalSchedule gmsFixedIntervalSchedule) { + FixedIntervalSchedule fixedIntervalSchedule = new FixedIntervalSchedule(); + fixedIntervalSchedule.setUnit(DateInterval.valueOf(gmsFixedIntervalSchedule.getUnit().name())); + fixedIntervalSchedule.setMultiple(gmsFixedIntervalSchedule.getMultiple()); + return fixedIntervalSchedule; + } + + private static AssertionSource mapSource(final com.linkedin.assertion.AssertionSource gmsSource) { + AssertionSource result = new AssertionSource(); + result.setType(AssertionSourceType.valueOf(gmsSource.getType().toString())); + if (gmsSource.hasCreated()) { + result.setCreated( + new AuditStamp( + gmsSource.getCreated().getTime(), gmsSource.getCreated().getActor().toString())); + } + return result; + } + + protected static com.linkedin.datahub.graphql.generated.SchemaFieldSpec mapSchemaFieldSpec( + final com.linkedin.schema.SchemaFieldSpec gmsField) { + final com.linkedin.datahub.graphql.generated.SchemaFieldSpec result = + new com.linkedin.datahub.graphql.generated.SchemaFieldSpec(); + result.setPath(gmsField.getPath()); + result.setType(gmsField.getType()); + result.setNativeType(gmsField.getNativeType()); + return result; } + + private static SchemaAssertionInfo mapSchemaAssertionInfo( + @Nullable final QueryContext context, + final com.linkedin.assertion.SchemaAssertionInfo gmsSchemaAssertionInfo) { + SchemaAssertionInfo result = new SchemaAssertionInfo(); + result.setCompatibility( + 
SchemaAssertionCompatibility.valueOf(gmsSchemaAssertionInfo.getCompatibility().name())); + result.setEntityUrn(gmsSchemaAssertionInfo.getEntity().toString()); + result.setSchema( + SchemaMetadataMapper.INSTANCE.apply( + context, gmsSchemaAssertionInfo.getSchema(), gmsSchemaAssertionInfo.getEntity(), 0L)); + result.setFields( + gmsSchemaAssertionInfo.getSchema().getFields().stream() + .map(AssertionMapper::mapSchemaField) + .collect(Collectors.toList())); + return result; + } + + private static CustomAssertionInfo mapCustomAssertionInfo( + @Nullable final QueryContext context, + final com.linkedin.assertion.CustomAssertionInfo gmsCustomAssertionInfo) { + CustomAssertionInfo result = new CustomAssertionInfo(); + result.setType(gmsCustomAssertionInfo.getType()); + result.setEntityUrn(gmsCustomAssertionInfo.getEntity().toString()); + if (gmsCustomAssertionInfo.hasField()) { + result.setField(AssertionMapper.mapDatasetSchemaField(gmsCustomAssertionInfo.getField())); + } + if (gmsCustomAssertionInfo.hasLogic()) { + result.setLogic(gmsCustomAssertionInfo.getLogic()); + } + + return result; + } + + private static SchemaAssertionField mapSchemaField(final SchemaField gmsField) { + SchemaAssertionField result = new SchemaAssertionField(); + result.setPath(gmsField.getFieldPath()); + result.setType(new SchemaFieldMapper().mapSchemaFieldDataType(gmsField.getType())); + if (gmsField.hasNativeDataType()) { + result.setNativeType(gmsField.getNativeDataType()); + } + return result; + } + + protected AssertionMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index 3493afdd8bd841..9c90478f03dc5f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java 
@@ -20,69 +20,73 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AssertionType + implements com.linkedin.datahub.graphql.types.EntityType { -public class AssertionType implements com.linkedin.datahub.graphql.types.EntityType { + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ASSERTION_KEY_ASPECT_NAME, + Constants.ASSERTION_INFO_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.ASSERTION_ACTIONS_ASPECT_NAME); + private final EntityClient _entityClient; - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ASSERTION_KEY_ASPECT_NAME, - Constants.ASSERTION_INFO_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME - ); - private final EntityClient _entityClient; + public AssertionType(final EntityClient entityClient) { + _entityClient = entityClient; + } - public AssertionType(final EntityClient entityClient) { - _entityClient = entityClient; - } + @Override + public EntityType type() { + return EntityType.ASSERTION; + } - @Override - public EntityType type() { - return EntityType.ASSERTION; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return Assertion.class; - } + @Override + public Class objectClass() { + return Assertion.class; + } - @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List assertionUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List assertionUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); - try { - final Map entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - ASPECTS_TO_FETCH, - 
context.getAuthentication()); + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(AssertionMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Assertions", e); - } + final List gmsResults = new ArrayList<>(urns.size()); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(AssertionMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Assertions", e); } + } - private Urn getUrn(final String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); - } + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } -} \ No newline at end of file + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/FieldAssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/FieldAssertionMapper.java new file mode 100644 index 00000000000000..82d041a464c3fb --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/FieldAssertionMapper.java @@ 
-0,0 +1,92 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.FieldAssertionInfo; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AssertionStdOperator; +import com.linkedin.datahub.graphql.generated.FieldAssertionType; +import com.linkedin.datahub.graphql.generated.FieldMetricType; +import com.linkedin.datahub.graphql.generated.FieldTransformType; +import com.linkedin.datahub.graphql.generated.FieldValuesFailThresholdType; +import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetFilterMapper; +import javax.annotation.Nullable; + +public class FieldAssertionMapper extends AssertionMapper { + + public static com.linkedin.datahub.graphql.generated.FieldAssertionInfo mapFieldAssertionInfo( + @Nullable final QueryContext context, final FieldAssertionInfo gmsFieldAssertionInfo) { + final com.linkedin.datahub.graphql.generated.FieldAssertionInfo result = + new com.linkedin.datahub.graphql.generated.FieldAssertionInfo(); + result.setEntityUrn(gmsFieldAssertionInfo.getEntity().toString()); + result.setType(FieldAssertionType.valueOf(gmsFieldAssertionInfo.getType().name())); + if (gmsFieldAssertionInfo.hasFilter()) { + result.setFilter(DatasetFilterMapper.map(context, gmsFieldAssertionInfo.getFilter())); + } + if (gmsFieldAssertionInfo.hasFieldValuesAssertion()) { + result.setFieldValuesAssertion( + mapFieldValuesAssertion(gmsFieldAssertionInfo.getFieldValuesAssertion())); + } + if (gmsFieldAssertionInfo.hasFieldMetricAssertion()) { + result.setFieldMetricAssertion( + mapFieldMetricAssertion(gmsFieldAssertionInfo.getFieldMetricAssertion())); + } + return result; + } + + private static com.linkedin.datahub.graphql.generated.FieldValuesAssertion + mapFieldValuesAssertion( + final com.linkedin.assertion.FieldValuesAssertion gmsFieldValuesAssertion) { + final com.linkedin.datahub.graphql.generated.FieldValuesAssertion result = + new 
com.linkedin.datahub.graphql.generated.FieldValuesAssertion(); + result.setField(mapSchemaFieldSpec(gmsFieldValuesAssertion.getField())); + result.setOperator(AssertionStdOperator.valueOf(gmsFieldValuesAssertion.getOperator().name())); + result.setFailThreshold( + mapFieldValuesFailThreshold(gmsFieldValuesAssertion.getFailThreshold())); + result.setExcludeNulls(gmsFieldValuesAssertion.isExcludeNulls()); + + if (gmsFieldValuesAssertion.hasTransform()) { + result.setTransform(mapFieldTransform(gmsFieldValuesAssertion.getTransform())); + } + + if (gmsFieldValuesAssertion.hasParameters()) { + result.setParameters(mapParameters(gmsFieldValuesAssertion.getParameters())); + } + return result; + } + + private static com.linkedin.datahub.graphql.generated.FieldMetricAssertion + mapFieldMetricAssertion( + final com.linkedin.assertion.FieldMetricAssertion gmsFieldMetricAssertion) { + final com.linkedin.datahub.graphql.generated.FieldMetricAssertion result = + new com.linkedin.datahub.graphql.generated.FieldMetricAssertion(); + result.setField(mapSchemaFieldSpec(gmsFieldMetricAssertion.getField())); + result.setMetric(FieldMetricType.valueOf(gmsFieldMetricAssertion.getMetric().name())); + result.setOperator(AssertionStdOperator.valueOf(gmsFieldMetricAssertion.getOperator().name())); + + if (gmsFieldMetricAssertion.hasParameters()) { + result.setParameters(mapParameters(gmsFieldMetricAssertion.getParameters())); + } + + return result; + } + + private static com.linkedin.datahub.graphql.generated.FieldTransform mapFieldTransform( + final com.linkedin.assertion.FieldTransform gmsFieldTransform) { + final com.linkedin.datahub.graphql.generated.FieldTransform result = + new com.linkedin.datahub.graphql.generated.FieldTransform(); + result.setType(FieldTransformType.valueOf(gmsFieldTransform.getType().name())); + return result; + } + + private static com.linkedin.datahub.graphql.generated.FieldValuesFailThreshold + mapFieldValuesFailThreshold( + final 
com.linkedin.assertion.FieldValuesFailThreshold gmsFieldValuesFailThreshold) { + final com.linkedin.datahub.graphql.generated.FieldValuesFailThreshold result = + new com.linkedin.datahub.graphql.generated.FieldValuesFailThreshold(); + result.setType( + FieldValuesFailThresholdType.valueOf(gmsFieldValuesFailThreshold.getType().name())); + result.setValue(gmsFieldValuesFailThreshold.getValue()); + return result; + } + + private FieldAssertionMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/FreshnessAssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/FreshnessAssertionMapper.java new file mode 100644 index 00000000000000..22e1c1d8bae9ea --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/FreshnessAssertionMapper.java @@ -0,0 +1,59 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.FreshnessAssertionInfo; +import com.linkedin.datahub.graphql.generated.FreshnessAssertionSchedule; +import com.linkedin.datahub.graphql.generated.FreshnessAssertionScheduleType; +import com.linkedin.datahub.graphql.generated.FreshnessAssertionType; +import com.linkedin.datahub.graphql.generated.FreshnessCronSchedule; +import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetFilterMapper; +import javax.annotation.Nullable; + +public class FreshnessAssertionMapper extends AssertionMapper { + + public static FreshnessAssertionInfo mapFreshnessAssertionInfo( + @Nullable final QueryContext context, + final com.linkedin.assertion.FreshnessAssertionInfo gmsFreshnessAssertionInfo) { + FreshnessAssertionInfo freshnessAssertionInfo = new FreshnessAssertionInfo(); + freshnessAssertionInfo.setEntityUrn(gmsFreshnessAssertionInfo.getEntity().toString()); + freshnessAssertionInfo.setType( + 
FreshnessAssertionType.valueOf(gmsFreshnessAssertionInfo.getType().name())); + if (gmsFreshnessAssertionInfo.hasSchedule()) { + freshnessAssertionInfo.setSchedule( + mapFreshnessAssertionSchedule(gmsFreshnessAssertionInfo.getSchedule())); + } + if (gmsFreshnessAssertionInfo.hasFilter()) { + freshnessAssertionInfo.setFilter( + DatasetFilterMapper.map(context, gmsFreshnessAssertionInfo.getFilter())); + } + return freshnessAssertionInfo; + } + + private static FreshnessCronSchedule mapFreshnessCronSchedule( + final com.linkedin.assertion.FreshnessCronSchedule gmsCronSchedule) { + FreshnessCronSchedule cronSchedule = new FreshnessCronSchedule(); + cronSchedule.setCron(gmsCronSchedule.getCron()); + cronSchedule.setTimezone(gmsCronSchedule.getTimezone()); + cronSchedule.setWindowStartOffsetMs(gmsCronSchedule.getWindowStartOffsetMs(GetMode.NULL)); + return cronSchedule; + } + + private static FreshnessAssertionSchedule mapFreshnessAssertionSchedule( + final com.linkedin.assertion.FreshnessAssertionSchedule gmsFreshnessAssertionSchedule) { + FreshnessAssertionSchedule freshnessAssertionSchedule = new FreshnessAssertionSchedule(); + freshnessAssertionSchedule.setType( + FreshnessAssertionScheduleType.valueOf(gmsFreshnessAssertionSchedule.getType().name())); + if (gmsFreshnessAssertionSchedule.hasCron()) { + freshnessAssertionSchedule.setCron( + mapFreshnessCronSchedule(gmsFreshnessAssertionSchedule.getCron())); + } + if (gmsFreshnessAssertionSchedule.hasFixedInterval()) { + freshnessAssertionSchedule.setFixedInterval( + mapFixedIntervalSchedule(gmsFreshnessAssertionSchedule.getFixedInterval())); + } + return freshnessAssertionSchedule; + } + + private FreshnessAssertionMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/SqlAssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/SqlAssertionMapper.java new file mode 100644 index 00000000000000..e75d2221164d4d --- /dev/null 
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/SqlAssertionMapper.java @@ -0,0 +1,27 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.SqlAssertionInfo; +import com.linkedin.datahub.graphql.generated.AssertionStdOperator; +import com.linkedin.datahub.graphql.generated.AssertionValueChangeType; +import com.linkedin.datahub.graphql.generated.SqlAssertionType; + +public class SqlAssertionMapper extends AssertionMapper { + + public static com.linkedin.datahub.graphql.generated.SqlAssertionInfo mapSqlAssertionInfo( + final SqlAssertionInfo gmsSqlAssertionInfo) { + final com.linkedin.datahub.graphql.generated.SqlAssertionInfo result = + new com.linkedin.datahub.graphql.generated.SqlAssertionInfo(); + result.setEntityUrn(gmsSqlAssertionInfo.getEntity().toString()); + result.setType(SqlAssertionType.valueOf(gmsSqlAssertionInfo.getType().name())); + result.setStatement(gmsSqlAssertionInfo.getStatement()); + result.setOperator(AssertionStdOperator.valueOf(gmsSqlAssertionInfo.getOperator().name())); + result.setParameters(mapParameters(gmsSqlAssertionInfo.getParameters())); + if (gmsSqlAssertionInfo.hasChangeType()) { + result.setChangeType( + AssertionValueChangeType.valueOf(gmsSqlAssertionInfo.getChangeType().name())); + } + return result; + } + + private SqlAssertionMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/VolumeAssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/VolumeAssertionMapper.java new file mode 100644 index 00000000000000..3d0294c45e5205 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/VolumeAssertionMapper.java @@ -0,0 +1,115 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.VolumeAssertionInfo; +import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.datahub.graphql.generated.AssertionStdOperator; +import com.linkedin.datahub.graphql.generated.AssertionValueChangeType; +import com.linkedin.datahub.graphql.generated.IncrementingSegmentFieldTransformerType; +import com.linkedin.datahub.graphql.generated.VolumeAssertionType; +import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetFilterMapper; +import javax.annotation.Nullable; + +public class VolumeAssertionMapper extends AssertionMapper { + + public static com.linkedin.datahub.graphql.generated.VolumeAssertionInfo mapVolumeAssertionInfo( + @Nullable final QueryContext context, final VolumeAssertionInfo gmsVolumeAssertionInfo) { + final com.linkedin.datahub.graphql.generated.VolumeAssertionInfo result = + new com.linkedin.datahub.graphql.generated.VolumeAssertionInfo(); + result.setEntityUrn(gmsVolumeAssertionInfo.getEntity().toString()); + result.setType(VolumeAssertionType.valueOf(gmsVolumeAssertionInfo.getType().name())); + if (gmsVolumeAssertionInfo.hasFilter()) { + result.setFilter(DatasetFilterMapper.map(context, gmsVolumeAssertionInfo.getFilter())); + } + if (gmsVolumeAssertionInfo.hasRowCountTotal()) { + result.setRowCountTotal(mapRowCountTotal(gmsVolumeAssertionInfo.getRowCountTotal())); + } + if (gmsVolumeAssertionInfo.hasRowCountChange()) { + result.setRowCountChange(mapRowCountChange(gmsVolumeAssertionInfo.getRowCountChange())); + } + if (gmsVolumeAssertionInfo.hasIncrementingSegmentRowCountTotal()) { + result.setIncrementingSegmentRowCountTotal( + mapIncrementingSegmentRowCountTotal( + gmsVolumeAssertionInfo.getIncrementingSegmentRowCountTotal())); + } + if (gmsVolumeAssertionInfo.hasIncrementingSegmentRowCountChange()) { + result.setIncrementingSegmentRowCountChange( + mapIncrementingSegmentRowCountChange( + gmsVolumeAssertionInfo.getIncrementingSegmentRowCountChange())); + } + return result; + } + + private static com.linkedin.datahub.graphql.generated.RowCountTotal mapRowCountTotal( + final 
com.linkedin.assertion.RowCountTotal gmsRowCountTotal) { + final com.linkedin.datahub.graphql.generated.RowCountTotal result = + new com.linkedin.datahub.graphql.generated.RowCountTotal(); + result.setOperator(AssertionStdOperator.valueOf(gmsRowCountTotal.getOperator().name())); + result.setParameters(mapParameters(gmsRowCountTotal.getParameters())); + return result; + } + + private static com.linkedin.datahub.graphql.generated.RowCountChange mapRowCountChange( + final com.linkedin.assertion.RowCountChange gmsRowCountChange) { + final com.linkedin.datahub.graphql.generated.RowCountChange result = + new com.linkedin.datahub.graphql.generated.RowCountChange(); + result.setOperator(AssertionStdOperator.valueOf(gmsRowCountChange.getOperator().name())); + result.setParameters(mapParameters(gmsRowCountChange.getParameters())); + result.setType(AssertionValueChangeType.valueOf(gmsRowCountChange.getType().name())); + return result; + } + + private static com.linkedin.datahub.graphql.generated.IncrementingSegmentRowCountTotal + mapIncrementingSegmentRowCountTotal( + final com.linkedin.assertion.IncrementingSegmentRowCountTotal + gmsIncrementingSegmentRowCountTotal) { + final com.linkedin.datahub.graphql.generated.IncrementingSegmentRowCountTotal result = + new com.linkedin.datahub.graphql.generated.IncrementingSegmentRowCountTotal(); + result.setOperator( + AssertionStdOperator.valueOf(gmsIncrementingSegmentRowCountTotal.getOperator().name())); + result.setParameters(mapParameters(gmsIncrementingSegmentRowCountTotal.getParameters())); + result.setSegment(mapIncrementingSegmentSpec(gmsIncrementingSegmentRowCountTotal.getSegment())); + return result; + } + + private static com.linkedin.datahub.graphql.generated.IncrementingSegmentRowCountChange + mapIncrementingSegmentRowCountChange( + final com.linkedin.assertion.IncrementingSegmentRowCountChange + gmsIncrementingSegmentRowCountChange) { + final com.linkedin.datahub.graphql.generated.IncrementingSegmentRowCountChange result 
= + new com.linkedin.datahub.graphql.generated.IncrementingSegmentRowCountChange(); + result.setOperator( + AssertionStdOperator.valueOf(gmsIncrementingSegmentRowCountChange.getOperator().name())); + result.setParameters(mapParameters(gmsIncrementingSegmentRowCountChange.getParameters())); + result.setSegment( + mapIncrementingSegmentSpec(gmsIncrementingSegmentRowCountChange.getSegment())); + result.setType( + AssertionValueChangeType.valueOf(gmsIncrementingSegmentRowCountChange.getType().name())); + return result; + } + + private static com.linkedin.datahub.graphql.generated.IncrementingSegmentSpec + mapIncrementingSegmentSpec(final com.linkedin.assertion.IncrementingSegmentSpec gmsSegment) { + final com.linkedin.datahub.graphql.generated.IncrementingSegmentSpec result = + new com.linkedin.datahub.graphql.generated.IncrementingSegmentSpec(); + result.setField(mapSchemaFieldSpec(gmsSegment.getField())); + if (gmsSegment.hasTransformer()) { + result.setTransformer(mapIncrementingSegmentFieldTransformer(gmsSegment.getTransformer())); + } + return result; + } + + private static com.linkedin.datahub.graphql.generated.IncrementingSegmentFieldTransformer + mapIncrementingSegmentFieldTransformer( + final com.linkedin.assertion.IncrementingSegmentFieldTransformer gmsTransformer) { + final com.linkedin.datahub.graphql.generated.IncrementingSegmentFieldTransformer result = + new com.linkedin.datahub.graphql.generated.IncrementingSegmentFieldTransformer(); + result.setType( + IncrementingSegmentFieldTransformerType.valueOf(gmsTransformer.getType().name())); + if (gmsTransformer.hasNativeType()) { + result.setNativeType(gmsTransformer.getNativeType()); + } + return result; + } + + private VolumeAssertionMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index d9f25a7cec8e1b..7f348c3f8022eb 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -4,9 +4,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.types.auth.mappers.AccessTokenMetadataMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -21,7 +21,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class AccessTokenMetadataType implements com.linkedin.datahub.graphql.types.EntityType { @@ -48,23 +47,31 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List keys, - @Nonnull QueryContext context) throws Exception { - final List tokenInfoUrns = keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List keys, @Nonnull QueryContext context) throws Exception { + final List tokenInfoUrns = + keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(Constants.ACCESS_TOKEN_ENTITY_NAME, new HashSet<>(tokenInfoUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.ACCESS_TOKEN_ENTITY_NAME, + new HashSet<>(tokenInfoUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(keys.size()); for (Urn urn : tokenInfoUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(AccessTokenMetadataMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Access Token Info", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index 9b387578798967..9c807bf0304add 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,26 +2,29 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; -import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class AccessTokenMetadataMapper implements ModelMapper { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); - public static AccessTokenMetadata map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static AccessTokenMetadata map( + @Nullable final QueryContext context, @Nonnull final EntityResponse 
entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { + public AccessTokenMetadata apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse input) { final AccessTokenMetadata metadata = new AccessTokenMetadata(); metadata.setUrn(input.getUrn().toString()); @@ -29,13 +32,15 @@ public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { metadata.setType(EntityType.ACCESS_TOKEN); final EnvelopedAspectMap aspectMap = input.getAspects(); - final MappingHelper mappingHelper = new MappingHelper<>(aspectMap, metadata); + final MappingHelper mappingHelper = + new MappingHelper<>(aspectMap, metadata); mappingHelper.mapToResult(Constants.ACCESS_TOKEN_INFO_NAME, this::mapTokenInfo); return mappingHelper.getResult(); } - private void mapTokenInfo(@Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { + private void mapTokenInfo( + @Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(dataMap); accessTokenMetadata.setName(tokenInfo.getName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/BusinessAttributeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/BusinessAttributeType.java new file mode 100644 index 00000000000000..b9d6a9254de101 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/BusinessAttributeType.java @@ -0,0 +1,135 @@ +package com.linkedin.datahub.graphql.types.businessattribute; + +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_KEY_ASPECT_NAME; +import static 
com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AutoCompleteResults; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; +import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.businessattribute.mappers.BusinessAttributeMapper; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; +import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.AutoCompleteResult; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class BusinessAttributeType implements SearchableEntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + BUSINESS_ATTRIBUTE_KEY_ASPECT_NAME, + 
OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + STATUS_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.BUSINESS_ATTRIBUTE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return BusinessAttribute.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List businessAttributeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map businessAttributeMap = + _entityClient.batchGetV2( + context.getOperationContext(), + BUSINESS_ATTRIBUTE_ENTITY_NAME, + new HashSet<>(businessAttributeUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : businessAttributeUrns) { + gmsResults.add(businessAttributeMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(BusinessAttributeMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Business Attributes", e); + } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "businessAttribute", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "businessAttribute", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/mappers/BusinessAttributeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/mappers/BusinessAttributeMapper.java new file mode 100644 index 00000000000000..87230b24577163 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/mappers/BusinessAttributeMapper.java @@ -0,0 +1,134 @@ +package com.linkedin.datahub.graphql.types.businessattribute.mappers; + +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import 
com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.common.InstitutionalMemory; +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; +import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class BusinessAttributeMapper implements ModelMapper { + + public static final BusinessAttributeMapper INSTANCE = new BusinessAttributeMapper(); + + public static BusinessAttribute map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public BusinessAttribute apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + BusinessAttribute result = new BusinessAttribute(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.BUSINESS_ATTRIBUTE); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); 
+ mappingHelper.mapToResult( + BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + ((businessAttribute, dataMap) -> + mapBusinessAttributeInfo( + context, businessAttribute, dataMap, entityResponse.getUrn()))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (businessAttribute, dataMap) -> + businessAttribute.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityResponse.getUrn()))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityResponse.getUrn()))); + return mappingHelper.getResult(); + } + + private void mapBusinessAttributeInfo( + final QueryContext context, + BusinessAttribute businessAttribute, + DataMap dataMap, + Urn entityUrn) { + BusinessAttributeInfo businessAttributeInfo = new BusinessAttributeInfo(dataMap); + com.linkedin.datahub.graphql.generated.BusinessAttributeInfo attributeInfo = + new com.linkedin.datahub.graphql.generated.BusinessAttributeInfo(); + if (businessAttributeInfo.hasFieldPath()) { + attributeInfo.setName(businessAttributeInfo.getFieldPath()); + } + if (businessAttributeInfo.hasDescription()) { + attributeInfo.setDescription(businessAttributeInfo.getDescription()); + } + if (businessAttributeInfo.hasCreated()) { + attributeInfo.setCreated(AuditStampMapper.map(context, businessAttributeInfo.getCreated())); + } + if (businessAttributeInfo.hasLastModified()) { + attributeInfo.setLastModified( + AuditStampMapper.map(context, businessAttributeInfo.getLastModified())); + } + if (businessAttributeInfo.hasGlobalTags()) { + attributeInfo.setTags( + GlobalTagsMapper.map(context, businessAttributeInfo.getGlobalTags(), entityUrn)); + } + if (businessAttributeInfo.hasGlossaryTerms()) { + attributeInfo.setGlossaryTerms( + GlossaryTermsMapper.map(context, businessAttributeInfo.getGlossaryTerms(), entityUrn)); + } + if (businessAttributeInfo.hasType()) { + 
attributeInfo.setType(mapSchemaFieldDataType(businessAttributeInfo.getType())); + } + if (businessAttributeInfo.hasCustomProperties()) { + attributeInfo.setCustomProperties( + CustomPropertiesMapper.map(businessAttributeInfo.getCustomProperties(), entityUrn)); + } + businessAttribute.setProperties(attributeInfo); + } + + private SchemaFieldDataType mapSchemaFieldDataType( + @Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { + final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); + if (type.isBytesType()) { + return SchemaFieldDataType.BYTES; + } else if (type.isFixedType()) { + return SchemaFieldDataType.FIXED; + } else if (type.isBooleanType()) { + return SchemaFieldDataType.BOOLEAN; + } else if (type.isStringType()) { + return SchemaFieldDataType.STRING; + } else if (type.isNumberType()) { + return SchemaFieldDataType.NUMBER; + } else if (type.isDateType()) { + return SchemaFieldDataType.DATE; + } else if (type.isTimeType()) { + return SchemaFieldDataType.TIME; + } else if (type.isEnumType()) { + return SchemaFieldDataType.ENUM; + } else if (type.isArrayType()) { + return SchemaFieldDataType.ARRAY; + } else if (type.isMapType()) { + return SchemaFieldDataType.MAP; + } else { + throw new RuntimeException( + String.format( + "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/mappers/BusinessAttributesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/mappers/BusinessAttributesMapper.java new file mode 100644 index 00000000000000..104bc6ecd9222b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/businessattribute/mappers/BusinessAttributesMapper.java @@ -0,0 +1,49 @@ +package com.linkedin.datahub.graphql.types.businessattribute.mappers; + +import com.linkedin.common.urn.Urn; +import 
com.linkedin.datahub.graphql.generated.BusinessAttribute; +import com.linkedin.datahub.graphql.generated.BusinessAttributeAssociation; +import com.linkedin.datahub.graphql.generated.BusinessAttributes; +import com.linkedin.datahub.graphql.generated.EntityType; +import java.util.Objects; +import javax.annotation.Nonnull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class BusinessAttributesMapper { + + private static final Logger _logger = + LoggerFactory.getLogger(BusinessAttributesMapper.class.getName()); + public static final BusinessAttributesMapper INSTANCE = new BusinessAttributesMapper(); + + public static BusinessAttributes map( + @Nonnull final com.linkedin.businessattribute.BusinessAttributes businessAttributes, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(businessAttributes, entityUrn); + } + + private BusinessAttributes apply( + @Nonnull com.linkedin.businessattribute.BusinessAttributes businessAttributes, + @Nonnull Urn entityUrn) { + final BusinessAttributes result = new BusinessAttributes(); + result.setBusinessAttribute( + mapBusinessAttributeAssociation(businessAttributes.getBusinessAttribute(), entityUrn)); + return result; + } + + private BusinessAttributeAssociation mapBusinessAttributeAssociation( + com.linkedin.businessattribute.BusinessAttributeAssociation businessAttributeAssociation, + Urn entityUrn) { + if (Objects.isNull(businessAttributeAssociation)) { + return null; + } + final BusinessAttributeAssociation businessAttributeAssociationResult = + new BusinessAttributeAssociation(); + final BusinessAttribute businessAttribute = new BusinessAttribute(); + businessAttribute.setUrn(businessAttributeAssociation.getBusinessAttributeUrn().toString()); + businessAttribute.setType(EntityType.BUSINESS_ATTRIBUTE); + businessAttributeAssociationResult.setBusinessAttribute(businessAttribute); + businessAttributeAssociationResult.setAssociatedUrn(entityUrn.toString()); + return businessAttributeAssociationResult; + } 
+} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index cfec8f8a2391f6..fe9b511f4a7dde 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.chart; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.ChartUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,7 +40,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,202 +56,221 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class ChartType + implements SearchableEntityType, + 
BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + CHART_KEY_ASPECT_NAME, + CHART_INFO_ASPECT_NAME, + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, + CHART_QUERY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); + private static final Set FACET_FIELDS = + ImmutableSet.of("access", "queryType", "tool", "type"); -public class ChartType implements SearchableEntityType, BrowsableEntityType, MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - CHART_KEY_ASPECT_NAME, - CHART_INFO_ASPECT_NAME, - EDITABLE_CHART_PROPERTIES_ASPECT_NAME, - CHART_QUERY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); - - private final EntityClient _entityClient; - - public ChartType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class inputClass() { - return ChartUpdateInput.class; - } + public ChartType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.CHART; - } + @Override + public Class inputClass() { + return 
ChartUpdateInput.class; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.CHART; + } - @Override - public Class objectClass() { - return Chart.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map chartMap = - _entityClient.batchGetV2( - CHART_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(chartMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsChart -> gmsChart == null ? null : DataFetcherResult.newResult() - .data(ChartMapper.map(gmsChart)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Charts", e); - } - } + @Override + public Class objectClass() { + return Chart.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "chart", - query, - facetFilters, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + @Override + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + + final Map chartMap = + _entityClient.batchGetV2( + context.getOperationContext(), + CHART_ENTITY_NAME, + new HashSet<>(urns), + 
ASPECTS_TO_RESOLVE); + + final List gmsResults = new ArrayList<>(urnStrs.size()); + for (Urn urn : urns) { + gmsResults.add(chartMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsChart -> + gmsChart == null + ? null + : DataFetcherResult.newResult() + .data(ChartMapper.map(context, gmsChart)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Charts", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "chart", query, - filters, - limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "chart", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(context.getOperationContext(), "chart", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "chart", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } - private ChartUrn getChartUrn(String urnStr) { - try { - return ChartUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); - } - } + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(context.getOperationContext(), getChartUrn(urn)); + return BrowsePathsMapper.map(context, result); + } - @Override - public Chart update(@Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = ChartUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + private ChartUrn getChartUrn(String urnStr) { + try { + return ChartUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); } + } + + @Override + public Chart update( + @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + ChartUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CHART_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.CHART_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 657c9b688aed20..561c3b9bec1e03 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,18 +1,25 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InputFields; import com.linkedin.common.InstitutionalMemory; 
import com.linkedin.common.Ownership; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AccessLevel; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.ChartEditableProperties; @@ -27,18 +34,21 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import 
com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -46,185 +56,252 @@ import com.linkedin.metadata.key.ChartKey; import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; +public class ChartMapper implements ModelMapper { + public static final ChartMapper INSTANCE = new ChartMapper(); -public class ChartMapper implements ModelMapper { + public static Chart map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - public static final ChartMapper INSTANCE = new ChartMapper(); + @Override + public Chart apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final Chart result = new Chart(); + Urn entityUrn = entityResponse.getUrn(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CHART); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapChartInfo(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); + mappingHelper.mapToResult( + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); + mappingHelper.mapToResult( + 
OWNERSHIP_ASPECT_NAME, + (chart, dataMap) -> + chart.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (chart, dataMap) -> chart.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(context, dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (chart, dataMap) -> + chart.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (chart, dataMap) -> + chart.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, ChartMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, ChartMapper::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (chart, dataMap) -> + chart.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (chart, dataMap) -> + chart.setInputFields( + InputFieldsMapper.map(context, new InputFields(dataMap), entityUrn))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (chart, dataMap) -> + chart.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + 
mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((chart, dataMap) -> + chart.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { - final Chart result = new Chart(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CHART); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); - mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); - mappingHelper.mapToResult(EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> - chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (chart, dataMap) -> - chart.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> - chart.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> - chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - 
mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (chart, dataMap) -> - chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> - chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (chart, dataMap) -> - chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> - chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Chart.class); + } else { + return mappingHelper.getResult(); } + } - private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final ChartKey gmsKey = new ChartKey(dataMap); - chart.setChartId(gmsKey.getChartId()); - chart.setTool(gmsKey.getDashboardTool()); - chart.setPlatform(DataPlatform.builder() + private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final ChartKey gmsKey = new ChartKey(dataMap); + chart.setChartId(gmsKey.getChartId()); + chart.setTool(gmsKey.getDashboardTool()); + chart.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new 
DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } - private void mapChartInfo(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); - } + private void mapChartInfo( + @Nullable final QueryContext context, + @Nonnull Chart chart, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); + chart.setInfo(mapInfo(context, gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(context, gmsChartInfo, entityUrn)); + } - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} - */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartInfo result = new ChartInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasInputs()) { - result.setInputs(info.getInputs().stream().map(input -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(input.getDatasetUrn().toString()); - return dataset; - }).collect(Collectors.toList())); - } - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - 
result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; - } + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ + private ChartInfo mapInfo( + @Nonnull QueryContext context, + final com.linkedin.chart.ChartInfo info, + @Nonnull Urn entityUrn) { + final ChartInfo result = new ChartInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} - */ - private ChartProperties mapChartInfoToProperties(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartProperties result = new ChartProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasInputs()) { + result.setInputs( + info.getInputs().stream() + .map( + input -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(input.getDatasetUrn().toString()); + return dataset; + }) + .collect(Collectors.toList())); } - private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); - chart.setQuery(mapQuery(gmsChartQuery)); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { - final ChartQuery result = new ChartQuery(); - result.setRawQuery(query.getRawQuery()); - result.setType(ChartQueryType.valueOf(query.getType().toString())); - return result; + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); } - - private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); - final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); - chartEditableProperties.setDescription(editableChartProperties.getDescription()); - chart.setEditableProperties(chartEditableProperties); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // 
TODO: Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ + private static ChartProperties mapChartInfoToProperties( + @Nullable final QueryContext context, + final com.linkedin.chart.ChartInfo info, + @Nonnull Urn entityUrn) { + final ChartProperties result = new ChartProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - private void mapGlobalTags(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - chart.setGlobalTags(globalTags); - chart.setTags(globalTags); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); + } + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); + chart.setQuery(mapQuery(gmsChartQuery)); + } + + private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { + final ChartQuery result = new ChartQuery(); + result.setRawQuery(query.getRawQuery()); + result.setType(ChartQueryType.valueOf(query.getType().toString())); + return result; + } + + private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); + final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); + chartEditableProperties.setDescription(editableChartProperties.getDescription()); + chart.setEditableProperties(chartEditableProperties); + } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - chart.setContainer(Container - .builder() + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Chart chart, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); + chart.setGlobalTags(globalTags); + chart.setTags(globalTags); + } + + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + chart.setContainer( + Container.builder() 
.setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); - } + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Chart chart, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + chart.setDomain(DomainAssociationMapper.map(context, domains, chart.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index b52ddad0b0071e..806e537c6ec261 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ChartUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -16,69 +19,71 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; +public class ChartUpdateInputMapper + implements 
InputModelMapper, Urn> { + public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); -public class ChartUpdateInputMapper implements InputModelMapper, Urn> { + public static Collection map( + @Nullable final QueryContext context, + @Nonnull final ChartUpdateInput chartUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, chartUpdateInput, actor); + } - public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); + @Override + public Collection apply( + @Nullable final QueryContext context, + @Nonnull final ChartUpdateInput chartUpdateInput, + @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - public static Collection map(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + if (chartUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(context, chartUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } - @Override - public Collection apply(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - final Collection proposals = new ArrayList<>(3); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - - if (chartUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper - .aspectToProposal(OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); - } - - if (chartUpdateInput.getTags() != null || 
chartUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (chartUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - // Tags overrides global tags if provided - if (chartUpdateInput.getTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } - - if (chartUpdateInput.getEditableProperties() != null) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(); - editableChartProperties.setDescription(chartUpdateInput.getEditableProperties().getDescription()); - if (!editableChartProperties.hasCreated()) { - editableChartProperties.setCreated(auditStamp); - } - editableChartProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); - } + if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (chartUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); + } + // Tags overrides global tags if provided + if (chartUpdateInput.getTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); + } + 
proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (chartUpdateInput.getEditableProperties() != null) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(); + editableChartProperties.setDescription( + chartUpdateInput.getEditableProperties().getDescription()); + if (!editableChartProperties.hasCreated()) { + editableChartProperties.setCreated(auditStamp); + } + editableChartProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java index d6ef713f3ade6b..269fb7d4ddf793 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java @@ -2,34 +2,61 @@ import com.linkedin.common.InputFields; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InputField; import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaFieldMapper; +import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; - +@Slf4j public class InputFieldsMapper { - public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); - - public static com.linkedin.datahub.graphql.generated.InputFields map(@Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } - - public 
com.linkedin.datahub.graphql.generated.InputFields apply(@Nonnull final InputFields input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields(); - result.setFields(input.getFields().stream().map(field -> { - InputField fieldResult = new InputField(); - - if (field.hasSchemaField()) { - fieldResult.setSchemaField(SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); - } - if (field.hasSchemaFieldUrn()) { - fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); - } - return fieldResult; - }).collect(Collectors.toList())); - - return result; - } + public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); + + public static com.linkedin.datahub.graphql.generated.InputFields map( + @Nullable final QueryContext context, + @Nonnull final InputFields metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.InputFields apply( + @Nullable final QueryContext context, + @Nonnull final InputFields input, + @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.InputFields result = + new com.linkedin.datahub.graphql.generated.InputFields(); + result.setFields( + input.getFields().stream() + .map( + field -> { + InputField fieldResult = new InputField(); + Urn parentUrn = entityUrn; + + if (field.hasSchemaFieldUrn()) { + fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); + try { + parentUrn = + Urn.createFromString(field.getSchemaFieldUrn().getEntityKey().get(0)); + } catch (URISyntaxException e) { + log.error( + "Field urn resolution: failed to extract parentUrn successfully from {}. 
Falling back to {}", + field.getSchemaFieldUrn(), + entityUrn, + e); + } + } + if (field.hasSchemaField()) { + fieldResult.setSchemaField( + SchemaFieldMapper.map(context, field.getSchemaField(), parentUrn)); + } + return fieldResult; + }) + .collect(Collectors.toList())); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java index beb2b64e1dd7d5..851569a6cc5827 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java @@ -1,29 +1,31 @@ package com.linkedin.datahub.graphql.types.common.mappers; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class AuditStampMapper implements ModelMapper { - public static final AuditStampMapper INSTANCE = new AuditStampMapper(); + public static final AuditStampMapper INSTANCE = new AuditStampMapper(); - public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - return INSTANCE.apply(auditStamp); - } + public static AuditStamp map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.AuditStamp auditStamp) { + return INSTANCE.apply(context, auditStamp); + } - @Override - public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - final AuditStamp result = new AuditStamp(); - result.setActor(auditStamp.getActor().toString()); - result.setTime(auditStamp.getTime()); - return result; - } + @Override + public AuditStamp apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.AuditStamp auditStamp) { + final AuditStamp result = new AuditStamp(); + result.setActor(auditStamp.getActor().toString()); + result.setTime(auditStamp.getTime()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java index 41ee99fa412ad1..d7f542a26dc017 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java @@ -1,35 +1,41 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.BrowsePathsV2; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePathEntry; import com.linkedin.datahub.graphql.generated.BrowsePathV2; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - 
-import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathsV2Mapper implements ModelMapper { public static final BrowsePathsV2Mapper INSTANCE = new BrowsePathsV2Mapper(); - public static BrowsePathV2 map(@Nonnull final BrowsePathsV2 metadata) { - return INSTANCE.apply(metadata); + public static BrowsePathV2 map( + @Nullable QueryContext context, @Nonnull final BrowsePathsV2 metadata) { + return INSTANCE.apply(context, metadata); } @Override - public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) { + public BrowsePathV2 apply(@Nullable QueryContext context, @Nonnull final BrowsePathsV2 input) { final BrowsePathV2 result = new BrowsePathV2(); - final List path = input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); + final List path = + input.getPath().stream() + .map(p -> mapBrowsePathEntry(context, p)) + .collect(Collectors.toList()); result.setPath(path); return result; } - private BrowsePathEntry mapBrowsePathEntry(com.linkedin.common.BrowsePathEntry pathEntry) { + private BrowsePathEntry mapBrowsePathEntry( + @Nullable QueryContext context, com.linkedin.common.BrowsePathEntry pathEntry) { final BrowsePathEntry entry = new BrowsePathEntry(); entry.setName(pathEntry.getId()); if (pathEntry.hasUrn() && pathEntry.getUrn() != null) { - entry.setEntity(UrnToEntityMapper.map(pathEntry.getUrn())); + entry.setEntity(UrnToEntityMapper.map(context, pathEntry.getUrn())); } return entry; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index 7144730ba9337e..14fd1c82d5df7a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; - -public class ChangeAuditStampsMapper implements ModelMapper { +public class ChangeAuditStampsMapper + implements ModelMapper { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); - public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { - return INSTANCE.apply(input); + public static ChangeAuditStamps map( + @Nullable QueryContext context, com.linkedin.common.ChangeAuditStamps input) { + return INSTANCE.apply(context, input); } @Override - public ChangeAuditStamps apply(com.linkedin.common.ChangeAuditStamps input) { + public ChangeAuditStamps apply( + @Nullable QueryContext context, com.linkedin.common.ChangeAuditStamps input) { ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps(); - changeAuditStamps.setCreated(AuditStampMapper.map(input.getCreated())); - changeAuditStamps.setLastModified(AuditStampMapper.map(input.getLastModified())); + changeAuditStamps.setCreated(AuditStampMapper.map(context, input.getCreated())); + changeAuditStamps.setLastModified(AuditStampMapper.map(context, input.getLastModified())); if (input.hasDeleted()) { - changeAuditStamps.setDeleted(AuditStampMapper.map(input.getDeleted())); + changeAuditStamps.setDeleted(AuditStampMapper.map(context, input.getDeleted())); } return changeAuditStamps; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 6c8bdada17b242..bb35a6da984189 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,26 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.NonNull; public class CostMapper implements ModelMapper { - public static final CostMapper INSTANCE = new CostMapper(); + public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); - } + public static Cost map( + @Nullable QueryContext context, @NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(context, cost); + } - @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { - final Cost result = new Cost(); - result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); - return result; - } + @Override + public Cost apply(@Nullable QueryContext context, @Nonnull final com.linkedin.common.Cost cost) { + final Cost result = new Cost(); + result.setCostType(CostType.valueOf(cost.getCostType().name())); + result.setCostValue(CostValueMapper.map(context, cost.getCost())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 3f41c92cd17154..c71c2274362b8b 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -1,26 +1,29 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; public class CostValueMapper implements ModelMapper { - public static final CostValueMapper INSTANCE = new CostValueMapper(); + public static final CostValueMapper INSTANCE = new CostValueMapper(); - public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); - } + public static CostValue map( + @Nullable QueryContext context, @NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(context, costValue); + } - @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { - final CostValue result = new CostValue(); - if (costValue.isCostCode()) { - result.setCostCode(costValue.getCostCode()); - } - if (costValue.isCostId()) { - result.setCostId(costValue.getCostId().floatValue()); - } - return result; + @Override + public CostValue apply( + @Nullable QueryContext context, @NonNull final com.linkedin.common.CostValue costValue) { + final CostValue result = new CostValue(); + if (costValue.isCostCode()) { + result.setCostCode(costValue.getCostCode()); + } + if (costValue.isCostId()) { + result.setCostId(costValue.getCostId().floatValue()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java index 
50e4846611a9b0..b09678ddeb42ed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java @@ -1,36 +1,36 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.CustomPropertiesEntry; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class CustomPropertiesMapper { - public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); + public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); - public static List map(@Nonnull final Map input, @Nonnull Urn urn) { - return INSTANCE.apply(input, urn); - } + public static List map( + @Nonnull final Map input, @Nonnull Urn urn) { + return INSTANCE.apply(input, urn); + } - public List apply(@Nonnull final Map input, @Nonnull Urn urn) { - List results = new ArrayList<>(); - for (String key : input.keySet()) { - final CustomPropertiesEntry entry = new CustomPropertiesEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - entry.setAssociatedUrn(urn.toString()); - results.add(entry); - } - return results; + public List apply( + @Nonnull final Map input, @Nonnull Urn urn) { + List results = new ArrayList<>(); + for (String key : input.keySet()) { + final CustomPropertiesEntry entry = new CustomPropertiesEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + entry.setAssociatedUrn(urn.toString()); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index 1f10cd6ee3658b..4345819867617b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -1,21 +1,28 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import 
com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class DataPlatformInstanceAspectMapper implements ModelMapper { +public class DataPlatformInstanceAspectMapper + implements ModelMapper { - public static final DataPlatformInstanceAspectMapper INSTANCE = new DataPlatformInstanceAspectMapper(); + public static final DataPlatformInstanceAspectMapper INSTANCE = + new DataPlatformInstanceAspectMapper(); - public static DataPlatformInstance map(@Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { - return INSTANCE.apply(dataPlatformInstance); + public static DataPlatformInstance map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { + return INSTANCE.apply(context, dataPlatformInstance); } @Override - public DataPlatformInstance apply(@Nonnull final com.linkedin.common.DataPlatformInstance input) { + public DataPlatformInstance apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.DataPlatformInstance input) { final DataPlatformInstance result = new DataPlatformInstance(); if (input.hasInstance()) { result.setType(EntityType.DATA_PLATFORM_INSTANCE); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java index 4bbf50bb723629..6959a6dcbd0393 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,24 +1,29 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class DeprecationMapper implements ModelMapper { - public static final DeprecationMapper INSTANCE = new DeprecationMapper(); +public class DeprecationMapper + implements ModelMapper { + public static final DeprecationMapper INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(context, deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Deprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setActorEntity(UrnToEntityMapper.map(context, input.getActor())); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DocumentationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DocumentationMapper.java new file mode 100644 index 00000000000000..dcb4921d353981 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DocumentationMapper.java @@ -0,0 +1,54 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubConnection; +import com.linkedin.datahub.graphql.generated.Documentation; +import com.linkedin.datahub.graphql.generated.DocumentationAssociation; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class DocumentationMapper + implements ModelMapper { + + public static final DocumentationMapper INSTANCE = new DocumentationMapper(); + + public static Documentation map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.Documentation metadata) { + return INSTANCE.apply(context, metadata); + } + + @Override + public Documentation apply( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.Documentation input) { + final Documentation result = new Documentation(); + result.setDocumentations( + input.getDocumentations().stream() + .map(docAssociation -> mapDocAssociation(context, docAssociation)) + .collect(Collectors.toList())); + return result; + } + + private DocumentationAssociation mapDocAssociation( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.DocumentationAssociation association) { + final DocumentationAssociation result = new DocumentationAssociation(); + result.setDocumentation(association.getDocumentation()); + if (association.getAttribution() != null) { + result.setAttribution(MetadataAttributionMapper.map(context, association.getAttribution())); + } + return result; + } + + private DataHubConnection mapConnectionEntity(@Nonnull final Urn urn) { + DataHubConnection 
connection = new DataHubConnection(); + connection.setUrn(urn.toString()); + connection.setType(EntityType.DATAHUB_CONNECTION); + return connection; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 478d256df66a4b..51801c43061e8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ -1,20 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EmbedMapper implements ModelMapper { public static final EmbedMapper INSTANCE = new EmbedMapper(); - public static Embed map(@Nonnull final com.linkedin.common.Embed metadata) { - return INSTANCE.apply(metadata); + public static Embed map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Embed metadata) { + return INSTANCE.apply(context, metadata); } @Override - public Embed apply(@Nonnull final com.linkedin.common.Embed input) { + public Embed apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Embed input) { final Embed result = new Embed(); result.setRenderUrl(input.getRenderUrl()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java index 9f4517c89a6dc3..c1483f8d6d9638 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java @@ -1,44 +1,55 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.FineGrainedLineage; import com.linkedin.datahub.graphql.generated.SchemaFieldRef; import com.linkedin.dataset.FineGrainedLineageArray; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import javax.annotation.Nonnull; public class FineGrainedLineagesMapper { public static final FineGrainedLineagesMapper INSTANCE = new FineGrainedLineagesMapper(); - public static List map(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { + public static List map( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { return INSTANCE.apply(fineGrainedLineages); } - public List apply(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { - final List result = new ArrayList<>(); + public List apply( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { + final List result = + new ArrayList<>(); if (fineGrainedLineages.size() == 0) { return result; } for (com.linkedin.dataset.FineGrainedLineage fineGrainedLineage : fineGrainedLineages) { - com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); + com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = + new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); if (fineGrainedLineage.hasUpstreams()) { - resultEntry.setUpstreams(fineGrainedLineage.getUpstreams().stream() - .filter(entry 
-> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setUpstreams( + fineGrainedLineage.getUpstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } if (fineGrainedLineage.hasDownstreams()) { - resultEntry.setDownstreams(fineGrainedLineage.getDownstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setDownstreams( + fineGrainedLineage.getDownstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); + } + if (fineGrainedLineage.hasQuery()) { + resultEntry.setQuery(fineGrainedLineage.getQuery().toString()); + } + if (fineGrainedLineage.hasTransformOperation()) { + resultEntry.setTransformOperation(fineGrainedLineage.getTransformOperation()); } result.add(resultEntry); } @@ -46,8 +57,7 @@ public List apply(@No } private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { - return new SchemaFieldRef(schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); + return new SchemaFieldRef( + schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java new file mode 100644 index 00000000000000..cf0603d6d49739 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java @@ -0,0 +1,32 
@@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.GroupingCriterion; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class GroupingCriterionInputMapper + implements ModelMapper { + + public static final GroupingCriterionInputMapper INSTANCE = new GroupingCriterionInputMapper(); + + public static com.linkedin.metadata.query.GroupingCriterion map( + @Nullable QueryContext context, @Nonnull final GroupingCriterion groupingCriterion) { + return INSTANCE.apply(context, groupingCriterion); + } + + @Override + public com.linkedin.metadata.query.GroupingCriterion apply( + @Nullable QueryContext context, GroupingCriterion input) { + return new com.linkedin.metadata.query.GroupingCriterion() + .setBaseEntityType( + input.getBaseEntityType() != null + ? 
EntityTypeMapper.getName(input.getBaseEntityType()) + : null, + SetMode.REMOVE_OPTIONAL_IF_NULL) + .setGroupingEntityType(EntityTypeMapper.getName(input.getGroupingEntityType())); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 8bcfe7eb3b6d00..c57e7fd30da988 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -1,23 +1,32 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemory; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMapper { - public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); + public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); - public static InstitutionalMemory map(@Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(memory, entityUrn); - } + public static InstitutionalMemory map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemory memory, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, memory, entityUrn); + } - public InstitutionalMemory apply(@Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { - final InstitutionalMemory result = new InstitutionalMemory(); - result.setElements(input.getElements().stream().map(metadata -> - 
InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)).collect(Collectors.toList())); - return result; - } + public InstitutionalMemory apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemory input, + @Nonnull final Urn entityUrn) { + final InstitutionalMemory result = new InstitutionalMemory(); + result.setElements( + input.getElements().stream() + .map(metadata -> InstitutionalMemoryMetadataMapper.map(context, metadata, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java index ba4d37173abb83..7c6de02ecc8767 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java @@ -1,33 +1,41 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; - +import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMetadataMapper { - public static final InstitutionalMemoryMetadataMapper INSTANCE = new InstitutionalMemoryMetadataMapper(); + public static final InstitutionalMemoryMetadataMapper INSTANCE = + new InstitutionalMemoryMetadataMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) { - return 
INSTANCE.apply(metadata, entityUrn); - } + public static InstitutionalMemoryMetadata map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); + } - public InstitutionalMemoryMetadata apply(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) { - final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); - result.setUrl(input.getUrl().toString()); - result.setDescription(input.getDescription()); // deprecated field - result.setLabel(input.getDescription()); - result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); - result.setCreated(AuditStampMapper.map(input.getCreateStamp())); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + public InstitutionalMemoryMetadata apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, + @Nonnull final Urn entityUrn) { + final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); + result.setUrl(input.getUrl().toString()); + result.setDescription(input.getDescription()); // deprecated field + result.setLabel(input.getDescription()); + result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); + result.setCreated(AuditStampMapper.map(context, input.getCreateStamp())); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } - private CorpUser getAuthor(String actor) { - CorpUser partialUser = new CorpUser(); - partialUser.setUrn(actor); - return partialUser; - } + private CorpUser getAuthor(String actor) { + CorpUser partialUser = new CorpUser(); + partialUser.setUrn(actor); + return partialUser; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java index 28986dcae57251..0219f91e60e6d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java @@ -1,31 +1,37 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.common.AuditStamp; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class InstitutionalMemoryMetadataUpdateMapper implements ModelMapper { +public class InstitutionalMemoryMetadataUpdateMapper + implements ModelMapper { - private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = new InstitutionalMemoryMetadataUpdateMapper(); + private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = + new InstitutionalMemoryMetadataUpdateMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemoryMetadata map( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryMetadataUpdate input) { + return INSTANCE.apply(context, input); + } - @Override - public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); - metadata.setDescription(input.getDescription()); 
- metadata.setUrl(new Url(input.getUrl())); - metadata.setCreateStamp(new AuditStamp() + @Override + public InstitutionalMemoryMetadata apply( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryMetadataUpdate input) { + final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); + metadata.setDescription(input.getDescription()); + metadata.setUrl(new Url(input.getUrl())); + metadata.setCreateStamp( + new AuditStamp() .setActor(CorpUserUtils.getCorpUserUrn(input.getAuthor())) - .setTime(input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt()) - ); - return metadata; - } + .setTime( + input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt())); + return metadata; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java index bf063896290eba..d8bdd354d4ad52 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java @@ -1,30 +1,34 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - -import javax.annotation.Nonnull; - import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadataArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class InstitutionalMemoryUpdateMapper implements ModelMapper { +public class InstitutionalMemoryUpdateMapper + 
implements ModelMapper { - private static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper(); + private static final InstitutionalMemoryUpdateMapper INSTANCE = + new InstitutionalMemoryUpdateMapper(); - public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemory map( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryUpdate input) { + return INSTANCE.apply(context, input); + } - @Override - public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { - final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); - institutionalMemory.setElements(new InstitutionalMemoryMetadataArray( - input.getElements() - .stream() - .map(InstitutionalMemoryMetadataUpdateMapper::map) + @Override + public InstitutionalMemory apply( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryUpdate input) { + final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); + institutionalMemory.setElements( + new InstitutionalMemoryMetadataArray( + input.getElements().stream() + .map(e -> InstitutionalMemoryMetadataUpdateMapper.map(context, e)) .collect(Collectors.toList()))); - return institutionalMemory; - } + return institutionalMemory; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/LineageFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/LineageFlagsInputMapper.java new file mode 100644 index 00000000000000..87664ef2af4c74 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/LineageFlagsInputMapper.java @@ -0,0 +1,76 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.UrnArray; +import com.linkedin.common.UrnArrayMap; +import com.linkedin.common.urn.UrnUtils; +import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityTypeToPlatforms; +import com.linkedin.datahub.graphql.generated.LineageFlags; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** + * Maps GraphQL SearchFlags to Pegasus + * + *

To be replaced by auto-generated mappers implementations + */ +public class LineageFlagsInputMapper + implements ModelMapper { + + public static final LineageFlagsInputMapper INSTANCE = new LineageFlagsInputMapper(); + + @Nonnull + public static com.linkedin.metadata.query.LineageFlags map( + QueryContext queryContext, @Nonnull final LineageFlags lineageFlags) { + return INSTANCE.apply(queryContext, lineageFlags); + } + + @Override + public com.linkedin.metadata.query.LineageFlags apply( + QueryContext context, @Nullable final LineageFlags lineageFlags) { + com.linkedin.metadata.query.LineageFlags result = + new com.linkedin.metadata.query.LineageFlags(); + if (lineageFlags == null) { + return result; + } + if (lineageFlags.getIgnoreAsHops() != null) { + result.setIgnoreAsHops(mapIgnoreAsHops(lineageFlags.getIgnoreAsHops())); + } + if (lineageFlags.getStartTimeMillis() != null) { + result.setStartTimeMillis(lineageFlags.getStartTimeMillis()); + } + // Default to "now" if no end time is provided, but start time is provided. 
+ Long endTimeMillis = + ResolverUtils.getLineageEndTimeMillis( + lineageFlags.getStartTimeMillis(), lineageFlags.getEndTimeMillis()); + if (endTimeMillis != null) { + result.setEndTimeMillis(endTimeMillis); + } + if (lineageFlags.getEntitiesExploredPerHopLimit() != null) { + result.setEntitiesExploredPerHopLimit(lineageFlags.getEntitiesExploredPerHopLimit()); + } + return result; + } + + private static UrnArrayMap mapIgnoreAsHops(List ignoreAsHops) { + UrnArrayMap result = new UrnArrayMap(); + ignoreAsHops.forEach( + ignoreAsHop -> + result.put( + EntityTypeMapper.getName(ignoreAsHop.getEntityType()), + new UrnArray( + Optional.ofNullable(ignoreAsHop.getPlatforms()) + .orElse(Collections.emptyList()) + .stream() + .map(UrnUtils::getUrn) + .collect(Collectors.toList())))); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/MetadataAttributionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/MetadataAttributionMapper.java new file mode 100644 index 00000000000000..55fb7ad6f3a2b8 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/MetadataAttributionMapper.java @@ -0,0 +1,35 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.MetadataAttribution; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class MetadataAttributionMapper + implements ModelMapper { + + public static final MetadataAttributionMapper INSTANCE = new MetadataAttributionMapper(); + + public static MetadataAttribution map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.common.MetadataAttribution metadata) { + return INSTANCE.apply(context, metadata); + } + + @Override + public MetadataAttribution apply( + 
@Nullable final QueryContext context, + @Nonnull final com.linkedin.common.MetadataAttribution input) { + final MetadataAttribution result = new MetadataAttribution(); + result.setTime(input.getTime()); + result.setActor(UrnToEntityMapper.map(context, input.getActor())); + if (input.getSource() != null) { + result.setSource(UrnToEntityMapper.map(context, input.getSource())); + } + if (input.getSourceDetail() != null) { + result.setSourceDetail(StringMapMapper.map(context, input.getSourceDetail())); + } + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java index 986954fab87dbd..5ed6aa609946f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java @@ -1,59 +1,68 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.Operation; +import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OperationSourceType; import com.linkedin.datahub.graphql.generated.OperationType; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class OperationMapper implements TimeSeriesAspectMapper { +public class OperationMapper + implements TimeSeriesAspectMapper { - public static final OperationMapper INSTANCE = new OperationMapper(); + public static final OperationMapper INSTANCE = new 
OperationMapper(); - public static com.linkedin.datahub.graphql.generated.Operation map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); - } + public static com.linkedin.datahub.graphql.generated.Operation map( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.Operation apply( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + + Operation gmsProfile = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + Operation.class); + + final com.linkedin.datahub.graphql.generated.Operation result = + new com.linkedin.datahub.graphql.generated.Operation(); - @Override - public com.linkedin.datahub.graphql.generated.Operation apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - Operation gmsProfile = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - Operation.class); - - final com.linkedin.datahub.graphql.generated.Operation result = - new com.linkedin.datahub.graphql.generated.Operation(); - - result.setTimestampMillis(gmsProfile.getTimestampMillis()); - result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); - if (gmsProfile.hasActor()) { - result.setActor(gmsProfile.getActor().toString()); - } - result.setOperationType(OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); - result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); - if (gmsProfile.hasSourceType()) { - result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); - } - if (gmsProfile.hasPartitionSpec()) { - result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); - } - if 
(gmsProfile.hasCustomProperties()) { - result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); - } - if (gmsProfile.hasNumAffectedRows()) { - result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); - } - if (gmsProfile.hasAffectedDatasets()) { - result.setAffectedDatasets(gmsProfile.getAffectedDatasets().stream().map(Urn::toString).collect(Collectors.toList())); - } - - return result; + result.setTimestampMillis(gmsProfile.getTimestampMillis()); + result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); + if (gmsProfile.hasActor()) { + result.setActor(gmsProfile.getActor().toString()); } + result.setOperationType( + OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); + result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); + if (gmsProfile.hasSourceType()) { + result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); + } + if (gmsProfile.hasPartitionSpec()) { + result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); + } + if (gmsProfile.hasCustomProperties()) { + result.setCustomProperties(StringMapMapper.map(context, gmsProfile.getCustomProperties())); + } + if (gmsProfile.hasNumAffectedRows()) { + result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); + } + if (gmsProfile.hasAffectedDatasets()) { + result.setAffectedDatasets( + gmsProfile.getAffectedDatasets().stream() + .map(Urn::toString) + .collect(Collectors.toList())); + } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java index 181bdc176fb941..8b4f9a1f4ca506 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -9,52 +12,56 @@ import com.linkedin.datahub.graphql.generated.OwnershipType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class OwnerMapper { - public static final OwnerMapper INSTANCE = new OwnerMapper(); + public static final OwnerMapper INSTANCE = new OwnerMapper(); - public static Owner map(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(owner, entityUrn); + public static Owner map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Owner owner, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, owner, entityUrn); + } + + public Owner apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Owner owner, + @Nonnull final Urn entityUrn) { + final Owner result = new Owner(); + // Deprecated + result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); + + if (owner.getTypeUrn() == null) { + OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); + owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); } - public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - final Owner result = new Owner(); - // Deprecated - result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); - - if (owner.getTypeUrn() == null) { - OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); - owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); - } - - if (owner.getTypeUrn() != null) { - OwnershipTypeEntity entity = new OwnershipTypeEntity(); - entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); - entity.setUrn(owner.getTypeUrn().toString()); - result.setOwnershipType(entity); - } - if (owner.getOwner().getEntityType().equals("corpuser")) { - CorpUser partialOwner = new CorpUser(); - partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } else { - CorpGroup partialOwner = new CorpGroup(); - 
partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } - if (owner.hasSource()) { - result.setSource(OwnershipSourceMapper.map(owner.getSource())); - } - result.setAssociatedUrn(entityUrn.toString()); - return result; + if (owner.getTypeUrn() != null) { + OwnershipTypeEntity entity = new OwnershipTypeEntity(); + entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); + entity.setUrn(owner.getTypeUrn().toString()); + result.setOwnershipType(entity); + } + if (owner.getOwner().getEntityType().equals("corpuser")) { + CorpUser partialOwner = new CorpUser(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } else { + CorpGroup partialOwner = new CorpGroup(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } + if (owner.hasSource()) { + result.setSource(OwnershipSourceMapper.map(context, owner.getSource())); } + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java index d978abee5bdfc1..5cf680d88281f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java @@ -1,56 +1,58 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import com.linkedin.common.urn.UrnUtils; -import javax.annotation.Nonnull; - import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; import com.linkedin.common.OwnershipType; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.OwnerUpdate; -import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils; +import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import com.linkedin.common.urn.Urn; - import java.net.URISyntaxException; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OwnerUpdateMapper implements ModelMapper { - private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); - - public static Owner map(@Nonnull final OwnerUpdate input) { - return INSTANCE.apply(input); + private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); + + public static Owner map(@Nullable QueryContext context, @Nonnull final OwnerUpdate input) { + return INSTANCE.apply(context, input); + } + + @Override + public Owner apply(@Nullable QueryContext context, @Nonnull final OwnerUpdate input) { + final Owner owner = new Owner(); + try { + if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { + owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); + } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { + owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); } - - @Override - public Owner apply(@Nonnull final OwnerUpdate input) { - final Owner owner = new Owner(); - try { - if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { - owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); - } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { - owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); - } - } catch (URISyntaxException e) { - e.printStackTrace(); - } - if (input.getOwnershipTypeUrn() != null) { - 
owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - } - // For backwards compatibility we have to always set the deprecated type. - // If the type exists we assume it's an old ownership type that we can map to. - // Else if it's a net new custom ownership type set old type to CUSTOM. - OwnershipType type = input.getType() != null ? OwnershipType.valueOf(input.getType().toString()) + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + } + // For backwards compatibility we have to always set the deprecated type. + // If the type exists we assume it's an old ownership type that we can map to. + // Else if it's a net new custom ownership type set old type to CUSTOM. + OwnershipType type = + input.getType() != null + ? OwnershipType.valueOf(input.getType().toString()) : OwnershipType.CUSTOM; - owner.setType(type); - - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - owner.setType(OwnershipType.CUSTOM); - } + owner.setType(type); - owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); - return owner; + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + owner.setType(OwnershipType.CUSTOM); } + + owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); + return owner; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java index 6614cfb28a4784..a3a28717c9eb81 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java @@ -1,31 +1,38 @@ package com.linkedin.datahub.graphql.types.common.mappers; 
import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Ownership; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class OwnershipMapper { - public static final OwnershipMapper INSTANCE = new OwnershipMapper(); + public static final OwnershipMapper INSTANCE = new OwnershipMapper(); - public static Ownership map(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(ownership, entityUrn); - } + public static Ownership map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Ownership ownership, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, ownership, entityUrn); + } - public Ownership apply(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - final Ownership result = new Ownership(); - result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); - result.setOwners(ownership.getOwners() - .stream() - .map(owner -> OwnerMapper.map(owner, entityUrn)) - .collect(Collectors.toList())); - return result; - } + public Ownership apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Ownership ownership, + @Nonnull final Urn entityUrn) { + final Ownership result = new Ownership(); + result.setLastModified(AuditStampMapper.map(context, ownership.getLastModified())); + result.setOwners( + ownership.getOwners().stream() + .map(owner -> OwnerMapper.map(context, owner, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java index abcc67c35f92af..12a38d9caa284e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java @@ -1,30 +1,35 
@@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnershipSource; import com.linkedin.datahub.graphql.generated.OwnershipSourceType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class OwnershipSourceMapper implements ModelMapper { +public class OwnershipSourceMapper + implements ModelMapper { - public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); + public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); - public static OwnershipSource map(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - return INSTANCE.apply(ownershipSource); - } + public static OwnershipSource map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + return INSTANCE.apply(context, ownershipSource); + } - @Override - public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - final OwnershipSource result = new OwnershipSource(); - result.setUrl(ownershipSource.getUrl()); - result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); - return result; - } + @Override + public OwnershipSource apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + final OwnershipSource result = new OwnershipSource(); + result.setUrl(ownershipSource.getUrl()); + result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java index 1162c69d74938f..6ceccff8a9e76e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java @@ -1,39 +1,46 @@ package com.linkedin.datahub.graphql.types.common.mappers; 
-import java.util.stream.Collectors; - import com.linkedin.common.AuditStamp; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnershipUpdate; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; - +import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class OwnershipUpdateMapper implements InputModelMapper { - private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); - - public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - return INSTANCE.apply(input, actor); - } - - @Override - public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - final Ownership ownership = new Ownership(); - - ownership.setOwners(new OwnerArray(input.getOwners() - .stream() - .map(OwnerUpdateMapper::map) - .collect(Collectors.toList()))); - - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - ownership.setLastModified(auditStamp); - - return ownership; - } + private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); + + public static Ownership map( + @Nullable QueryContext context, + @NonNull final OwnershipUpdate input, + @NonNull final Urn actor) { + return INSTANCE.apply(context, input, actor); + } + + @Override + public Ownership apply( + @Nullable QueryContext context, + @NonNull final OwnershipUpdate input, + @NonNull final Urn actor) { + final Ownership ownership = new Ownership(); + + ownership.setOwners( + new OwnerArray( + input.getOwners().stream() + .map(o -> OwnerUpdateMapper.map(context, o)) + .collect(Collectors.toList()))); + + final AuditStamp auditStamp = new AuditStamp(); + 
auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + ownership.setLastModified(auditStamp); + + return ownership; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index f3ac008734339e..e6b75f9482f59f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -1,26 +1,32 @@ package com.linkedin.datahub.graphql.types.common.mappers; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import com.linkedin.metadata.query.GroupingCriterionArray; +import com.linkedin.metadata.query.GroupingSpec; +import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps GraphQL SearchFlags to Pegasus * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class SearchFlagsInputMapper implements ModelMapper { +public class SearchFlagsInputMapper + implements ModelMapper { public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper(); - public static com.linkedin.metadata.query.SearchFlags map(@Nonnull final SearchFlags searchFlags) { - return INSTANCE.apply(searchFlags); + public static com.linkedin.metadata.query.SearchFlags map( + @Nullable QueryContext context, @Nonnull final SearchFlags searchFlags) { + return INSTANCE.apply(context, searchFlags); } @Override - public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags searchFlags) { + public com.linkedin.metadata.query.SearchFlags apply( + @Nullable QueryContext context, @Nonnull final SearchFlags searchFlags) { com.linkedin.metadata.query.SearchFlags result = new com.linkedin.metadata.query.SearchFlags(); if (searchFlags.getFulltext() != null) { result.setFulltext(searchFlags.getFulltext()); @@ -42,6 +48,22 @@ public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags if (searchFlags.getGetSuggestions() != null) { result.setGetSuggestions(searchFlags.getGetSuggestions()); } + if (searchFlags.getIncludeSoftDeleted() != null) { + result.setIncludeSoftDeleted(searchFlags.getIncludeSoftDeleted()); + } + if (searchFlags.getIncludeRestricted() != null) { + result.setIncludeRestricted(searchFlags.getIncludeRestricted()); + } + if (searchFlags.getGroupingSpec() != null + && searchFlags.getGroupingSpec().getGroupingCriteria() != null) { + result.setGroupingSpec( + new GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + searchFlags.getGroupingSpec().getGroupingCriteria().stream() + .map(c -> GroupingCriterionInputMapper.map(context, c)) + .collect(Collectors.toList())))); + } return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java index 942171017cea4a..eea4bcd4a28d2f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java @@ -1,32 +1,39 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SiblingProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class SiblingsMapper implements ModelMapper { +public class SiblingsMapper + implements ModelMapper { public static final SiblingsMapper INSTANCE = new SiblingsMapper(); - public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings siblings) { - return INSTANCE.apply(siblings); + public static SiblingProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Siblings siblings) { + return INSTANCE.apply(context, siblings); } @Override - public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) { + public SiblingProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Siblings siblings) { final SiblingProperties result = new SiblingProperties(); result.setIsPrimary(siblings.isPrimary()); - result.setSiblings(siblings.getSiblings() - .stream() - .map(UrnToEntityMapper::map) - .collect(Collectors.toList())); + result.setSiblings( + siblings.getSiblings().stream() + .filter(s -> context == null || canView(context.getOperationContext(), s)) + .map(s -> UrnToEntityMapper.map(context, s)) + .collect(Collectors.toList())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java index 25d01d8de0e4c7..f4f829a046f2ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java @@ -1,22 +1,25 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Status; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; public class StatusMapper implements ModelMapper { - public static final StatusMapper INSTANCE = new StatusMapper(); + public static final StatusMapper INSTANCE = new StatusMapper(); - public static Status map(@Nonnull final com.linkedin.common.Status metadata) { - return INSTANCE.apply(metadata); - } + public static Status map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Status metadata) { + return INSTANCE.apply(context, metadata); + } - @Override - public Status apply(@Nonnull final com.linkedin.common.Status input) { - final Status result = new Status(); - result.setRemoved(input.isRemoved()); - return result; - } + @Override + public Status apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Status input) { + final Status result = new Status(); + result.setRemoved(input.isRemoved()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java index 32c49a20104142..4175fdb2028653 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java @@ -1,35 +1,38 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.StringMapEntry; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class StringMapMapper implements ModelMapper, List> { - public static final StringMapMapper INSTANCE = new StringMapMapper(); + public static final StringMapMapper INSTANCE = new StringMapMapper(); - public static List map(@Nonnull final Map input) { - return INSTANCE.apply(input); - } + public static List map( + @Nullable QueryContext context, @Nonnull final Map input) { + return INSTANCE.apply(context, input); + } - @Override - public List apply(@Nonnull final Map input) { - List results = new ArrayList<>(); - for (String key : input.keySet()) { - final StringMapEntry entry = new StringMapEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - results.add(entry); - } - return results; + @Override + public List apply( + @Nullable QueryContext context, @Nonnull final Map input) { + List results = new ArrayList<>(); + for (String key : input.keySet()) { + final StringMapEntry entry = new StringMapEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java new file mode 100644 index 00000000000000..924ee92d2f00f5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -0,0 +1,28 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.SubTypes; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.ArrayList; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class SubTypesMapper + implements ModelMapper { + + public static final SubTypesMapper INSTANCE = new SubTypesMapper(); + + public static 
com.linkedin.datahub.graphql.generated.SubTypes map( + @Nullable QueryContext context, @Nonnull final SubTypes metadata) { + return INSTANCE.apply(context, metadata); + } + + @Override + public com.linkedin.datahub.graphql.generated.SubTypes apply( + @Nullable QueryContext context, @Nonnull final SubTypes input) { + final com.linkedin.datahub.graphql.generated.SubTypes result = + new com.linkedin.datahub.graphql.generated.SubTypes(); + result.setTypeNames(new ArrayList<>(input.getTypeNames())); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java index 8359f1ec86f34e..4fdf7edea07d9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java @@ -4,22 +4,24 @@ import java.util.List; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class UpstreamLineagesMapper { public static final UpstreamLineagesMapper INSTANCE = new UpstreamLineagesMapper(); - public static List map(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + public static List map( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { return INSTANCE.apply(upstreamLineage); } - public List apply(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { - if (!upstreamLineage.hasFineGrainedLineages() || upstreamLineage.getFineGrainedLineages() == null) { + public List apply( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + if (!upstreamLineage.hasFineGrainedLineages() + || upstreamLineage.getFineGrainedLineages() == null) { return new ArrayList<>(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 34bf56a396b620..1988cafc486c18 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; +import com.linkedin.datahub.graphql.generated.BusinessAttribute; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -17,6 +21,7 @@ import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.Dataset; import 
com.linkedin.datahub.graphql.generated.Domain; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; @@ -28,25 +33,27 @@ import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; +import com.linkedin.datahub.graphql.generated.QueryEntity; +import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class UrnToEntityMapper implements ModelMapper { +public class UrnToEntityMapper implements ModelMapper { public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper(); - public static Entity map(@Nonnull final com.linkedin.common.urn.Urn urn) { - return INSTANCE.apply(urn); + public static Entity map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.urn.Urn urn) { + return INSTANCE.apply(context, urn); } @Override - public Entity apply(Urn input) { + public Entity apply(@Nullable QueryContext context, Urn input) { Entity partialEntity = null; if (input.getEntityType().equals("dataset")) { partialEntity = new Dataset(); @@ -153,6 +160,11 @@ public Entity apply(Urn input) { ((Domain) partialEntity).setUrn(input.toString()); ((Domain) partialEntity).setType(EntityType.DOMAIN); } + if (input.getEntityType().equals("erModelRelationship")) { + partialEntity = new 
ERModelRelationship(); + ((ERModelRelationship) partialEntity).setUrn(input.toString()); + ((ERModelRelationship) partialEntity).setType(EntityType.ER_MODEL_RELATIONSHIP); + } if (input.getEntityType().equals("assertion")) { partialEntity = new Assertion(); ((Assertion) partialEntity).setUrn(input.toString()); @@ -193,6 +205,26 @@ public Entity apply(Urn input) { ((OwnershipTypeEntity) partialEntity).setUrn(input.toString()); ((OwnershipTypeEntity) partialEntity).setType(EntityType.CUSTOM_OWNERSHIP_TYPE); } + if (input.getEntityType().equals(STRUCTURED_PROPERTY_ENTITY_NAME)) { + partialEntity = new StructuredPropertyEntity(); + ((StructuredPropertyEntity) partialEntity).setUrn(input.toString()); + ((StructuredPropertyEntity) partialEntity).setType(EntityType.STRUCTURED_PROPERTY); + } + if (input.getEntityType().equals(QUERY_ENTITY_NAME)) { + partialEntity = new QueryEntity(); + ((QueryEntity) partialEntity).setUrn(input.toString()); + ((QueryEntity) partialEntity).setType(EntityType.QUERY); + } + if (input.getEntityType().equals(RESTRICTED_ENTITY_NAME)) { + partialEntity = new Restricted(); + ((Restricted) partialEntity).setUrn(input.toString()); + ((Restricted) partialEntity).setType(EntityType.RESTRICTED); + } + if (input.getEntityType().equals(BUSINESS_ATTRIBUTE_ENTITY_NAME)) { + partialEntity = new BusinessAttribute(); + ((BusinessAttribute) partialEntity).setUrn(input.toString()); + ((BusinessAttribute) partialEntity).setType(EntityType.BUSINESS_ATTRIBUTE); + } return partialEntity; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java index 1e284efdb610f1..d9eab8e1ce9492 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java @@ -1,20 +1,19 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.EnvelopedAspectMap; import java.util.function.BiConsumer; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.AllArgsConstructor; import lombok.Getter; - +import org.apache.commons.lang3.function.TriConsumer; @AllArgsConstructor public class MappingHelper { - @Nonnull - private final EnvelopedAspectMap _aspectMap; - @Getter - @Nonnull - private final O result; + @Nonnull private final EnvelopedAspectMap _aspectMap; + @Getter @Nonnull private final O result; public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer consumer) { if (_aspectMap.containsKey(aspectName)) { @@ -22,4 +21,14 @@ public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer consumer) { + if (_aspectMap.containsKey(aspectName)) { + DataMap dataMap = _aspectMap.get(aspectName).getValue().data(); + consumer.accept(context, result, dataMap); + } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java index 7d1b374e1f9b6c..00e339a0320ef7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.Setter; - @Data @Setter @Getter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java index d08300d648c323..46df032cbffbff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java @@ -1,19 +1,17 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.mxe.SystemMetadata; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} @Nullable public static Long getLastIngestedTime(@Nonnull EnvelopedAspectMap aspectMap) { @@ -28,7 +26,8 @@ public static String getLastIngestedRunId(@Nonnull EnvelopedAspectMap aspectMap) } /** - * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects present for the entity. + * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects + * present for the entity. 
*/ @Nonnull public static List getLastIngestionRuns(@Nonnull EnvelopedAspectMap aspectMap) { @@ -36,12 +35,16 @@ public static List getLastIngestionRuns(@Nonnull EnvelopedAspectMap asp for (String aspect : aspectMap.keySet()) { if (aspectMap.get(aspect).hasSystemMetadata()) { SystemMetadata systemMetadata = aspectMap.get(aspect).getSystemMetadata(); - if (systemMetadata.hasLastRunId() && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + if (systemMetadata.hasLastRunId() + && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getLastRunId(); RunInfo run = new RunInfo(runId, lastObserved); runs.add(run); - } else if (systemMetadata.hasRunId() && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + } else if (systemMetadata.hasRunId() + && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { // Handle the legacy case: Check original run ids. 
Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getRunId(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java index 108aa7ed5b0c9e..606cebba0880f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java @@ -6,7 +6,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import lombok.AllArgsConstructor; - @AllArgsConstructor public class UpdateMappingHelper { private final String entityName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/connection/DataHubConnectionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/connection/DataHubConnectionType.java new file mode 100644 index 00000000000000..0a62d224c65130 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/connection/DataHubConnectionType.java @@ -0,0 +1,87 @@ +package com.linkedin.datahub.graphql.types.connection; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubConnection; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.resolvers.connection.ConnectionMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import graphql.execution.DataFetcherResult; +import io.datahubproject.metadata.services.SecretService; +import 
java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataHubConnectionType + implements com.linkedin.datahub.graphql.types.EntityType { + + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATAHUB_CONNECTION_DETAILS_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + private final EntityClient _entityClient; + private final SecretService _secretService; + + public DataHubConnectionType( + @Nonnull final EntityClient entityClient, @Nonnull final SecretService secretService) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + _secretService = Objects.requireNonNull(secretService, "secretService must not be null"); + } + + @Override + public EntityType type() { + return EntityType.DATAHUB_CONNECTION; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataHubConnection.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List connectionUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATAHUB_CONNECTION_ENTITY_NAME, + new HashSet<>(connectionUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : connectionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(ConnectionMapper.map(context, gmsResult, _secretService)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Connections", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 20cfe6ac461273..db44a5be8bdd37 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -19,7 +19,6 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.net.URISyntaxException; @@ -33,31 +32,33 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -public class ContainerType implements SearchableEntityType, +public class ContainerType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME, - Constants.SUB_TYPES_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - Constants.CONTAINER_ASPECT_NAME, - Constants.DOMAINS_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.DATA_PRODUCTS_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + 
Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + Constants.CONTAINER_PROPERTIES_ASPECT_NAME, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME, + Constants.SUB_TYPES_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + Constants.CONTAINER_ASPECT_NAME, + Constants.DOMAINS_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.DATA_PRODUCTS_ASPECT_NAME, + Constants.STRUCTURED_PROPERTIES_ASPECT_NAME, + Constants.FORMS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "container"; private final EntityClient _entityClient; - public ContainerType(final EntityClient entityClient) { + public ContainerType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -77,28 +78,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List containerUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List containerUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.CONTAINER_ENTITY_NAME, - new HashSet<>(containerUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.CONTAINER_ENTITY_NAME, + new HashSet<>(containerUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(ContainerMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(ContainerMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Container", e); @@ -114,24 +117,36 @@ private Urn getUrn(final String urnStr) { } @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ENTITY_NAME, + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + 
context.getOperationContext(), ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index aeaa8f4f85c144..2c0dc142bee3d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.types.container.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -12,33 +15,36 @@ import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class ContainerMapper { @Nullable - public static Container map(final EntityResponse entityResponse) { + public static Container map( + @Nullable final QueryContext context, final EntityResponse entityResponse) { final Container result = new Container(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -48,92 +54,130 @@ public static Container map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.CONTAINER); - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + 
result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } - final EnvelopedAspect envelopedContainerProperties = aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedContainerProperties = + aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); if (envelopedContainerProperties != null) { - result.setProperties(mapContainerProperties(new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); + result.setProperties( + mapContainerProperties( + new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedEditableContainerProperties = aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedEditableContainerProperties = + aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); if (envelopedEditableContainerProperties != null) { - result.setEditableProperties(mapContainerEditableProperties(new EditableContainerProperties(envelopedEditableContainerProperties.getValue().data()))); + result.setEditableProperties( + mapContainerEditableProperties( + new EditableContainerProperties( + envelopedEditableContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map( + context, new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new 
GlobalTags(envelopedTags.getValue().data()), entityUrn); + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map( + context, new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), + entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); if (statusAspect != null) { - result.setStatus(StatusMapper.map(new Status(statusAspect.getValue().data()))); + result.setStatus(StatusMapper.map(context, new Status(statusAspect.getValue().data()))); } final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { - result.setSubTypes(mapSubTypes(new SubTypes(envelopedSubTypes.getValue().data()))); + result.setSubTypes( + SubTypesMapper.map(context, new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); if (envelopedContainer != null) { - final com.linkedin.container.Container gmsContainer = new 
com.linkedin.container.Container(envelopedContainer.getValue().data()); - result.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(envelopedContainer.getValue().data()); + result.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } final EnvelopedAspect envelopedDomains = aspects.get(Constants.DOMAINS_ASPECT_NAME); if (envelopedDomains != null) { final Domains domains = new Domains(envelopedDomains.getValue().data()); // Currently we only take the first domain if it exists. - result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); + result.setDomain(DomainAssociationMapper.map(context, domains, entityUrn.toString())); } final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + result.setDeprecation( + DeprecationMapper.map(context, new Deprecation(envelopedDeprecation.getValue().data()))); + } + + final EnvelopedAspect envelopedStructuredProps = aspects.get(STRUCTURED_PROPERTIES_ASPECT_NAME); + if (envelopedStructuredProps != null) { + result.setStructuredProperties( + StructuredPropertiesMapper.map( + context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); + } + + final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); + if (envelopedForms != null) { + result.setForms( + FormsMapper.map(new Forms(envelopedForms.getValue().data()), entityUrn.toString())); } return result; } - private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties(final ContainerProperties gmsProperties, Urn entityUrn) { - final 
com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = new com.linkedin.datahub.graphql.generated.ContainerProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties( + final ContainerProperties gmsProperties, Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.ContainerProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { propertiesResult.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - propertiesResult.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + propertiesResult.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } if (gmsProperties.hasQualifiedName()) { propertiesResult.setQualifiedName(gmsProperties.getQualifiedName().toString()); @@ -142,20 +186,15 @@ private static com.linkedin.datahub.graphql.generated.ContainerProperties mapCon return propertiesResult; } - private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties mapContainerEditableProperties( - final EditableContainerProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.ContainerEditableProperties editableContainerProperties = - new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties + mapContainerEditableProperties(final EditableContainerProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.ContainerEditableProperties + editableContainerProperties = + new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); editableContainerProperties.setDescription(gmsProperties.getDescription()); 
return editableContainerProperties; } - private static com.linkedin.datahub.graphql.generated.SubTypes mapSubTypes(final SubTypes gmsSubTypes) { - final com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(gmsSubTypes.getTypeNames()); - return subTypes; - } - private static DataPlatform mapPlatform(final DataPlatformInstance platformInstance) { // Set dummy platform to be resolved. final DataPlatform dummyPlatform = new DataPlatform(); @@ -163,5 +202,5 @@ private static DataPlatform mapPlatform(final DataPlatformInstance platformInsta return dummyPlatform; } - private ContainerMapper() { } + private ContainerMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 285a119be0d43c..16d2940a392447 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -1,14 +1,18 @@ package com.linkedin.datahub.graphql.types.corpgroup; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.linkedin.common.url.Url; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import 
com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -28,7 +32,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -42,155 +45,196 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - -public class CorpGroupType implements SearchableEntityType, MutableType { - - private final EntityClient _entityClient; - - public CorpGroupType(final EntityClient entityClient) { - _entityClient = entityClient; +public class CorpGroupType + implements SearchableEntityType, + MutableType { + + private final EntityClient _entityClient; + + public CorpGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return CorpGroup.class; + } + + public Class inputClass() { + return CorpGroupUpdateInput.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_GROUP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + try { + final List corpGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map corpGroupMap = + _entityClient.batchGetV2( + context.getOperationContext(), + CORP_GROUP_ENTITY_NAME, + new HashSet<>(corpGroupUrns), + null); + + final List results = new ArrayList<>(urns.size()); + for (Urn 
urn : corpGroupUrns) { + results.add(corpGroupMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpGroup -> + gmsCorpGroup == null + ? null + : DataFetcherResult.newResult() + .data(CorpGroupMapper.map(context, gmsCorpGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load CorpGroup", e); } - - @Override - public Class objectClass() { - return CorpGroup.class; - } - - public Class inputClass() { - return CorpGroupUpdateInput.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "corpGroup", + query, + Collections.emptyMap(), + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "corpGroup", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public CorpGroup update( + @Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Urn groupUrn = Urn.createFromString(urn); + Map gmsResponse = + _entityClient.batchGetV2( + context.getOperationContext(), + CORP_GROUP_ENTITY_NAME, + ImmutableSet.of(groupUrn), + ImmutableSet.of(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)); + + CorpGroupEditableInfo existingCorpGroupEditableInfo = null; + if (gmsResponse.containsKey(groupUrn) + && 
gmsResponse + .get(groupUrn) + .getAspects() + .containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { + existingCorpGroupEditableInfo = + new CorpGroupEditableInfo( + gmsResponse + .get(groupUrn) + .getAspects() + .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME) + .getValue() + .data()); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + + return load(urn, context).getData(); } - - @Override - public EntityType type() { - return EntityType.CORP_GROUP; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpGroupUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final CorpGroupUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getDescription() != null) { + // Requires the Update Docs privilege. + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { + // Requires the Update Contact info privilege. 
+ specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); } - @Override - public List> batchLoad(final List urns, final QueryContext context) { - try { - final List corpGroupUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map corpGroupMap = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(corpGroupUrns), null, context.getAuthentication()); - - final List results = new ArrayList<>(); - for (Urn urn : corpGroupUrns) { - results.add(corpGroupMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpGroup -> gmsCorpGroup == null ? null - : DataFetcherResult.newResult().data(CorpGroupMapper.map(gmsCorpGroup)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load CorpGroup", e); - } - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult - searchResult = _entityClient.search("corpGroup", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpGroup", query, filters, limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + private RecordTemplate mapCorpGroupEditableInfo( + CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { + CorpGroupEditableInfo result = existing != null ? existing : new CorpGroupEditableInfo(); - @Override - public CorpGroup update(@Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Urn groupUrn = Urn.createFromString(urn); - Map gmsResponse = - _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, ImmutableSet.of(groupUrn), ImmutableSet.of( - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), - context.getAuthentication()); - - CorpGroupEditableInfo existingCorpGroupEditableInfo = null; - if (gmsResponse.containsKey(groupUrn) && gmsResponse.get(groupUrn).getAspects().containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { - existingCorpGroupEditableInfo = new CorpGroupEditableInfo(gmsResponse.get(groupUrn).getAspects() - .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME).getValue().data()); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + if (input.getDescription() != null) { + result.setDescription(input.getDescription()); } - - private boolean isAuthorizedToUpdate(String urn, CorpGroupUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpGroupUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getDescription() != null) { - // Requires the Update Docs privilege. - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { - // Requires the Update Contact info privilege. - specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } - - private RecordTemplate mapCorpGroupEditableInfo(CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { - CorpGroupEditableInfo result = existing != null ? 
existing : new CorpGroupEditableInfo(); - - if (input.getDescription() != null) { - result.setDescription(input.getDescription()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - return result; + if (input.getPictureLink() != null) { + result.setPictureLink(new Url(input.getPictureLink())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java index c1cd33b0077f63..318506d9d61fae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpgroup; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpGroupUrn; +import java.net.URISyntaxException; public class CorpGroupUtils { - private CorpGroupUtils() { } + private CorpGroupUtils() {} - public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpGroupUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); - } + public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpGroupUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java index f476794bc545ed..ed22bb06fd5c26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java @@ -1,30 +1,51 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; import com.linkedin.data.template.GetMode; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpGroupEditablePropertiesMapper implements ModelMapper { +public class CorpGroupEditablePropertiesMapper + implements ModelMapper< + com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> { + + private final Logger _logger = + LoggerFactory.getLogger(CorpGroupEditablePropertiesMapper.class.getName()); - public static final CorpGroupEditablePropertiesMapper INSTANCE = new CorpGroupEditablePropertiesMapper(); + public static final CorpGroupEditablePropertiesMapper INSTANCE = + new CorpGroupEditablePropertiesMapper(); - public static CorpGroupEditableProperties map(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { - return INSTANCE.apply(corpGroupEditableInfo); + public static CorpGroupEditableProperties map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + return INSTANCE.apply(context, corpGroupEditableInfo); } @Override - public CorpGroupEditableProperties apply(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public CorpGroupEditableProperties apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { final CorpGroupEditableProperties result = new CorpGroupEditableProperties(); result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT)); result.setSlack(corpGroupEditableInfo.getSlack(GetMode.DEFAULT)); result.setEmail(corpGroupEditableInfo.getEmail(GetMode.DEFAULT)); + com.linkedin.common.url.Url pictureLinkObject = + corpGroupEditableInfo.getPictureLink(GetMode.NULL); + String pictureLink = null; + if (pictureLinkObject != null) { + pictureLink = pictureLinkObject.toString(); + } + result.setPictureLink(pictureLink); return result; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java index 3d2d4aea2b0015..918d7f19b99f12 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java @@ -1,48 +1,62 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; -import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpGroupInfoMapper implements ModelMapper { +public class CorpGroupInfoMapper + implements ModelMapper { - public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); + public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); - public static CorpGroupInfo map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); - } + public static CorpGroupInfo map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + return INSTANCE.apply(context, corpGroupInfo); + } - @Override - public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { - final CorpGroupInfo result = new CorpGroupInfo(); - result.setEmail(info.getEmail()); - result.setDescription(info.getDescription()); - result.setDisplayName(info.getDisplayName()); - if (info.hasAdmins()) { - result.setAdmins(info.getAdmins().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasMembers()) { - result.setMembers(info.getMembers().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasGroups()) { - result.setGroups(info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); - } - return result; + @Override + public CorpGroupInfo apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo info) { + final CorpGroupInfo result = new CorpGroupInfo(); + result.setEmail(info.getEmail()); + result.setDescription(info.getDescription()); + result.setDisplayName(info.getDisplayName()); + if (info.hasAdmins()) { + result.setAdmins( + info.getAdmins().stream() + .map( + urn -> { + final CorpUser corpUser 
= new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasMembers()) { + result.setMembers( + info.getMembers().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasGroups()) { + result.setGroups( + info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 0fb1b66c644d78..6246cf64bbf7f8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -1,93 +1,120 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.Forms; import com.linkedin.common.Origin; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import 
com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.key.CorpGroupKey; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class CorpGroupMapper implements ModelMapper { - public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); + public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); - public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static CorpGroup map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - @Override - public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { - final CorpGroup result = new CorpGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public CorpGroup apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final CorpGroup result = new CorpGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); - mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); - mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); - if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { - mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); - } else { - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - result.setOrigin(mappedGroupOrigin); - } - return mappingHelper.getResult(); + 
result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); + mappingHelper.mapToResult(context, CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); + mappingHelper.mapToResult( + context, CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (entity, dataMap) -> this.mapOwnership(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { + mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); + } else { + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + result.setOrigin(mappedGroupOrigin); } + return mappingHelper.getResult(); + } - private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); - corpGroup.setName(corpGroupKey.getName()); - } + private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); + corpGroup.setName(corpGroupKey.getName()); + } - private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); - 
corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); - corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); - } + private void mapCorpGroupInfo( + @Nullable QueryContext context, @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); + corpGroup.setProperties(CorpGroupPropertiesMapper.map(context, corpGroupInfo)); + corpGroup.setInfo(CorpGroupInfoMapper.map(context, corpGroupInfo)); + } - private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - corpGroup.setEditableProperties(CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); - } + private void mapCorpGroupEditableInfo( + @Nullable QueryContext context, @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + corpGroup.setEditableProperties( + CorpGroupEditablePropertiesMapper.map(context, new CorpGroupEditableInfo(dataMap))); + } - private void mapOwnership(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); - } + private void mapOwnership( + @Nullable QueryContext context, + @Nonnull CorpGroup corpGroup, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + corpGroup.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn)); + } - private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - Origin groupOrigin = new Origin(dataMap); - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - if (groupOrigin.hasType()) { - mappedGroupOrigin.setType( - com.linkedin.datahub.graphql.generated.OriginType.valueOf(groupOrigin.getType().toString())); - } else { - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - } - if (groupOrigin.hasExternalType()) { - 
mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); - } - corpGroup.setOrigin(mappedGroupOrigin); + private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + Origin groupOrigin = new Origin(dataMap); + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + if (groupOrigin.hasType()) { + mappedGroupOrigin.setType( + com.linkedin.datahub.graphql.generated.OriginType.valueOf( + groupOrigin.getType().toString())); + } else { + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + } + if (groupOrigin.hasExternalType()) { + mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); } + corpGroup.setOrigin(mappedGroupOrigin); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java index 266d8be67cb061..3feef06b6cbb0b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java @@ -1,26 +1,31 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpGroupPropertiesMapper implements ModelMapper { +public class CorpGroupPropertiesMapper + implements ModelMapper { public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper(); - public static CorpGroupProperties map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); + public static CorpGroupProperties map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + return INSTANCE.apply(context, corpGroupInfo); } @Override - public CorpGroupProperties apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + public CorpGroupProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo info) { final CorpGroupProperties result = new CorpGroupProperties(); result.setEmail(info.getEmail()); result.setDescription(info.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index db2b49c790f57e..3c2bfd7225edf5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -1,6 +1,13 @@ package com.linkedin.datahub.graphql.types.corpuser; +import static com.linkedin.datahub.graphql.Constants.DEFAULT_PERSONA_URNS; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; +import com.linkedin.common.UrnArray; import com.linkedin.common.url.Url; import 
com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -8,9 +15,9 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.CorpUser; @@ -30,7 +37,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -45,176 +51,219 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; +public class CorpUserType + implements SearchableEntityType, MutableType { + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; -public class CorpUserType implements SearchableEntityType, MutableType { + public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { + _entityClient = entityClient; + _featureFlags = featureFlags; + } - private final EntityClient _entityClient; - private final FeatureFlags _featureFlags; + @Override + public Class objectClass() { + return CorpUser.class; + } - public CorpUserType(final EntityClient entityClient, final FeatureFlags 
featureFlags) { - _entityClient = entityClient; - _featureFlags = featureFlags; - } + @Override + public EntityType type() { + return EntityType.CORP_USER; + } - @Override - public Class objectClass() { - return CorpUser.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public EntityType type() { - return EntityType.CORP_USER; - } + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + try { + final List corpUserUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + final Map corpUserMap = + _entityClient.batchGetV2( + context.getOperationContext(), + CORP_USER_ENTITY_NAME, + new HashSet<>(corpUserUrns), + null); - @Override - public List> batchLoad(final List urns, final QueryContext context) { - try { - final List corpUserUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map corpUserMap = _entityClient - .batchGetV2(CORP_USER_ENTITY_NAME, new HashSet<>(corpUserUrns), null, - context.getAuthentication()); - - final List results = new ArrayList<>(); - for (Urn urn : corpUserUrns) { - results.add(corpUserMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpUser -> gmsCorpUser == null ? null - : DataFetcherResult.newResult().data(CorpUserMapper.map(gmsCorpUser, _featureFlags)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + final List results = new ArrayList<>(urns.size()); + for (Urn urn : corpUserUrns) { + results.add(corpUserMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpUser -> + gmsCorpUser == null + ? 
null + : DataFetcherResult.newResult() + .data(CorpUserMapper.map(context, gmsCorpUser, _featureFlags)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search("corpuser", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "corpuser", + query, + Collections.emptyMap(), + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "corpuser", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } - public Class inputClass() { - return CorpUserUpdateInput.class; - } + public Class inputClass() { 
+ return CorpUserUpdateInput.class; + } + + @Override + public CorpUser update( + @Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Optional existingCorpUserEditableInfo = + _entityClient.getVersionedAspect( + context.getOperationContext(), + urn, + CORP_USER_EDITABLE_INFO_NAME, + 0L, + CorpUserEditableInfo.class); - @Override - public CorpUser update(@Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Optional existingCorpUserEditableInfo = - _entityClient.getVersionedAspect(urn, CORP_USER_EDITABLE_INFO_NAME, 0L, CorpUserEditableInfo.class, - context.getAuthentication()); - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_USER_EDITABLE_INFO_NAME, mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_USER_EDITABLE_INFO_NAME, + mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); + _entityClient.ingestProposal(context.getOperationContext(), proposal, false); + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - private boolean isAuthorizedToUpdate(String urn, CorpUserUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + private boolean isAuthorizedToUpdate( + String urn, CorpUserUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - // Either the updating actor is the user, or the actor has privileges to update the user information. - return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized( + // Either the updating actor is the user, or the actor has privileges to update the user + // information. + return context.getActorUrn().equals(urn) + || AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); - } + } - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getSlack() != null - || updateInput.getEmail() != null - || updateInput.getPhone() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } else if (updateInput.getAboutMe() != null - || updateInput.getDisplayName() != null - || updateInput.getPictureLink() != null - || updateInput.getTeams() != null - || updateInput.getTitle() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup 
specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getSlack() != null + || updateInput.getEmail() != null + || updateInput.getPhone() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); + } else if (updateInput.getAboutMe() != null + || updateInput.getDisplayName() != null + || updateInput.getPictureLink() != null + || updateInput.getTeams() != null + || updateInput.getTitle() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); } - private RecordTemplate mapCorpUserEditableInfo(CorpUserUpdateInput input, Optional existing) { - CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getPictureLink() != null) { - result.setPictureLink(new Url(input.getPictureLink())); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getSkills() != null) { - result.setSkills(new StringArray(input.getSkills())); - } - if (input.getTeams() != null) { - result.setTeams(new StringArray(input.getTeams())); - } - if (input.getTitle() != null) { - result.setTitle(input.getTitle()); - } - if (input.getPhone() != null) { - result.setPhone(input.getPhone()); - } - if (input.getSlack() != null) { - 
result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - - return result; + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } + + private RecordTemplate mapCorpUserEditableInfo( + CorpUserUpdateInput input, Optional existing) { + CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); + } + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); + } + if (input.getPictureLink() != null) { + result.setPictureLink(new Url(input.getPictureLink())); + } + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); + } + if (input.getSkills() != null) { + result.setSkills(new StringArray(input.getSkills())); + } + if (input.getTeams() != null) { + result.setTeams(new StringArray(input.getTeams())); + } + if (input.getTitle() != null) { + result.setTitle(input.getTitle()); + } + if (input.getPhone() != null) { + result.setPhone(input.getPhone()); + } + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); + } + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); + } + if (input.getPlatformUrns() != null) { + result.setPlatforms( + new UrnArray( + input.getPlatformUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()))); + } + if (input.getPersonaUrn() != null) { + if (DEFAULT_PERSONA_URNS.contains(input.getPersonaUrn())) { + result.setPersona(UrnUtils.getUrn(input.getPersonaUrn())); + } else { + throw new DataHubGraphQLException( + String.format("Provided persona urn %s does not exist", input.getPersonaUrn()), + 
DataHubGraphQLErrorCode.NOT_FOUND); + } } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java index 0b5b40c3117e05..9cf8da69281a96 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpuser; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpuserUrn; +import java.net.URISyntaxException; public class CorpUserUtils { - private CorpUserUtils() { } + private CorpUserUtils() {} - public static CorpuserUrn getCorpUserUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpuserUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create CorpUserUrn from string %s", urnStr), e); - } + public static CorpuserUrn getCorpUserUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpuserUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpUserUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java index 2a9f0efd69bcc8..38f3c75d7a9fa8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java @@ -1,37 +1,62 @@ package 
com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; +import com.linkedin.datahub.graphql.generated.DataHubPersona; +import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpUserEditableInfoMapper implements ModelMapper { +public class CorpUserEditableInfoMapper + implements ModelMapper { - public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); + public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); - public static CorpUserEditableProperties map(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - return INSTANCE.apply(info); - } + public static CorpUserEditableProperties map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + return INSTANCE.apply(context, info); + } - @Override - public CorpUserEditableProperties apply(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - final CorpUserEditableProperties result = new CorpUserEditableProperties(); - result.setDisplayName(info.getDisplayName()); - result.setTitle(info.getTitle()); - result.setAboutMe(info.getAboutMe()); - result.setSkills(info.getSkills()); - result.setTeams(info.getTeams()); - result.setEmail(info.getEmail()); - result.setPhone(info.getPhone()); - result.setSlack(info.getSlack()); - if (info.hasPictureLink()) { - result.setPictureLink(info.getPictureLink().toString()); - } - return result; + @Override + public CorpUserEditableProperties apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + final CorpUserEditableProperties result = new CorpUserEditableProperties(); + result.setDisplayName(info.getDisplayName()); + result.setTitle(info.getTitle()); + result.setAboutMe(info.getAboutMe()); + result.setSkills(info.getSkills()); + result.setTeams(info.getTeams()); + result.setEmail(info.getEmail()); + result.setPhone(info.getPhone()); + result.setSlack(info.getSlack()); + if (info.hasPictureLink()) { + result.setPictureLink(info.getPictureLink().toString()); + } + if 
(info.hasPlatforms()) { + result.setPlatforms( + info.getPlatforms().stream() + .map( + urn -> { + DataPlatform platform = new DataPlatform(); + platform.setUrn(urn.toString()); + return platform; + }) + .collect(Collectors.toList())); + } + if (info.hasPersona()) { + DataHubPersona persona = new DataHubPersona(); + persona.setUrn(info.getPersona().toString()); + result.setPersona(persona); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java index 96f60c08cd7c28..a728ea3695b508 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java @@ -1,40 +1,45 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpUserInfoMapper implements ModelMapper { +public class CorpUserInfoMapper + implements ModelMapper { - public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); + public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); - public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); - } + public static CorpUserInfo map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(context, corpUserInfo); + } - @Override - public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { - final CorpUserInfo result = new CorpUserInfo(); - result.setActive(info.isActive()); - result.setCountryCode(info.getCountryCode()); - result.setDepartmentId(info.getDepartmentId()); - result.setDepartmentName(info.getDepartmentName()); - result.setEmail(info.getEmail()); - result.setDisplayName(info.getDisplayName()); - result.setFirstName(info.getFirstName()); - result.setLastName(info.getLastName()); - result.setFullName(info.getFullName()); - result.setTitle(info.getTitle()); - if (info.hasManagerUrn()) { - result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); - } - return result; + @Override + public CorpUserInfo apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo info) { + final CorpUserInfo result = new CorpUserInfo(); + result.setActive(info.isActive()); + result.setCountryCode(info.getCountryCode()); + result.setDepartmentId(info.getDepartmentId()); + result.setDepartmentName(info.getDepartmentName()); + result.setEmail(info.getEmail()); + result.setDisplayName(info.getDisplayName()); + result.setFirstName(info.getFirstName()); + result.setLastName(info.getLastName()); + result.setFullName(info.getFullName()); + 
result.setTitle(info.getTitle()); + if (info.hasManagerUrn()) { + result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index adcfb91c9cdf29..4fa278983399b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserAppearanceSettings; @@ -13,6 +17,8 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; @@ -23,123 +29,156 @@ import com.linkedin.identity.CorpUserSettings; import com.linkedin.identity.CorpUserStatus; import com.linkedin.metadata.key.CorpUserKey; +import 
com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class CorpUserMapper { - public static final CorpUserMapper INSTANCE = new CorpUserMapper(); - - public static CorpUser map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse, null); + public static final CorpUserMapper INSTANCE = new CorpUserMapper(); + + public static CorpUser map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse, null); + } + + public static CorpUser map( + @Nullable QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nullable final FeatureFlags featureFlags) { + return INSTANCE.apply(context, entityResponse, featureFlags); + } + + public CorpUser apply( + @Nullable QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nullable final FeatureFlags featureFlags) { + final CorpUser result = new CorpUser(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_USER); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); + mappingHelper.mapToResult( + CORP_USER_INFO_ASPECT_NAME, + (corpUser, dataMap) -> this.mapCorpUserInfo(context, corpUser, dataMap, entityUrn)); + mappingHelper.mapToResult( + CORP_USER_EDITABLE_INFO_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setEditableProperties( + CorpUserEditableInfoMapper.map(context, new CorpUserEditableInfo(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setGlobalTags( + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + CORP_USER_STATUS_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setStatus(CorpUserStatusMapper.map(context, new 
CorpUserStatus(dataMap)))); + mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + mapCorpUserSettings( + result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); + + return mappingHelper.getResult(); + } + + private void mapCorpUserSettings( + @Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { + CorpUserSettings corpUserSettings = new CorpUserSettings(); + if (envelopedAspect != null) { + corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); } + com.linkedin.datahub.graphql.generated.CorpUserSettings result = + new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - public static CorpUser map(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - return INSTANCE.apply(entityResponse, featureFlags); - } + // Map Appearance Settings -- Appearance settings always exist. 
+ result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); - public CorpUser apply(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - final CorpUser result = new CorpUser(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_USER); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); - mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); - mappingHelper.mapToResult(CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setEditableProperties(CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME, - (corpUser, dataMap) -> corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); - mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); - - mapCorpUserSettings(result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); - - return mappingHelper.getResult(); + // Map Views Settings. 
+ if (corpUserSettings.hasViews()) { + result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); } - private void mapCorpUserSettings(@Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { - CorpUserSettings corpUserSettings = new CorpUserSettings(); - if (envelopedAspect != null) { - corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); - } - com.linkedin.datahub.graphql.generated.CorpUserSettings result = - new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - - // Map Appearance Settings -- Appearance settings always exist. - result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); - - // Map Views Settings. - if (corpUserSettings.hasViews()) { - result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); - } - - corpUser.setSettings(result); - } - - @Nonnull - private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( - @Nonnull final CorpUserSettings corpUserSettings, - @Nullable final FeatureFlags featureFlags - ) { - CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); - if (featureFlags != null) { - appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); - } else { - appearanceResult.setShowSimplifiedHomepage(false); - } - - if (corpUserSettings.hasAppearance()) { - appearanceResult.setShowSimplifiedHomepage(corpUserSettings.getAppearance().isShowSimplifiedHomepage()); - } - return appearanceResult; - } - - @Nonnull - private CorpUserViewsSettings mapCorpUserViewsSettings(@Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { - CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); - - if (viewsSettings.hasDefaultView()) { - final DataHubView unresolvedView = new DataHubView(); - unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); - unresolvedView.setType(EntityType.DATAHUB_VIEW); - 
viewsResult.setDefaultView(unresolvedView); - } - - return viewsResult; + corpUser.setSettings(result); + } + + @Nonnull + private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( + @Nonnull final CorpUserSettings corpUserSettings, @Nullable final FeatureFlags featureFlags) { + CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); + if (featureFlags != null) { + appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); + } else { + appearanceResult.setShowSimplifiedHomepage(false); } - private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserKey corpUserKey = new CorpUserKey(dataMap); - corpUser.setUsername(corpUserKey.getUsername()); + if (corpUserSettings.hasAppearance()) { + appearanceResult.setShowSimplifiedHomepage( + corpUserSettings.getAppearance().isShowSimplifiedHomepage()); } - - private void mapCorpUserInfo(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); - corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); - corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); - CorpUserProperties corpUserProperties = corpUser.getProperties(); - if (corpUserInfo.hasCustomProperties()) { - corpUserProperties.setCustomProperties(CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); - } - corpUser.setProperties(corpUserProperties); + return appearanceResult; + } + + @Nonnull + private CorpUserViewsSettings mapCorpUserViewsSettings( + @Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { + CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); + + if (viewsSettings.hasDefaultView()) { + final DataHubView unresolvedView = new DataHubView(); + unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); + unresolvedView.setType(EntityType.DATAHUB_VIEW); + viewsResult.setDefaultView(unresolvedView); } - 
private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); - boolean isNativeUser = - corpUserCredentials != null && corpUserCredentials.hasSalt() && corpUserCredentials.hasHashedPassword(); - corpUser.setIsNativeUser(isNativeUser); + return viewsResult; + } + + private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserKey corpUserKey = new CorpUserKey(dataMap); + corpUser.setUsername(corpUserKey.getUsername()); + } + + private void mapCorpUserInfo( + @Nullable QueryContext context, + @Nonnull CorpUser corpUser, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); + corpUser.setProperties(CorpUserPropertiesMapper.map(context, corpUserInfo)); + corpUser.setInfo(CorpUserInfoMapper.map(context, corpUserInfo)); + CorpUserProperties corpUserProperties = corpUser.getProperties(); + if (corpUserInfo.hasCustomProperties()) { + corpUserProperties.setCustomProperties( + CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); } + corpUser.setProperties(corpUserProperties); + } + + private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); + boolean isNativeUser = + corpUserCredentials != null + && corpUserCredentials.hasSalt() + && corpUserCredentials.hasHashedPassword(); + corpUser.setIsNativeUser(isNativeUser); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java index c64406a74733bc..738ae68cd756d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java @@ -1,25 +1,27 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ -public class CorpUserPropertiesMapper implements ModelMapper { +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ +public class CorpUserPropertiesMapper + implements ModelMapper { public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper(); - public static CorpUserProperties map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); + public static CorpUserProperties map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(context, corpUserInfo); } @Override - public CorpUserProperties apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + public CorpUserProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo info) { final CorpUserProperties result = new CorpUserProperties(); result.setActive(info.isActive()); result.setCountryCode(info.getCountryCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java index d0644fbfdacec0..eb31754a9f0f0a 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java @@ -1,21 +1,26 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class CorpUserStatusMapper implements ModelMapper { +public class CorpUserStatusMapper + implements ModelMapper { public static final CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper(); - public static CorpUserStatus map(@Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { - return INSTANCE.apply(corpUserStatus); + public static CorpUserStatus map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { + return INSTANCE.apply(context, corpUserStatus); } @Override - public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus status) { + public CorpUserStatus apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserStatus status) { // Warning- if the backend provides an unexpected value this will fail. 
return CorpUserStatus.valueOf(status.getStatus()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 104c7c004cb664..89a41732109964 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dashboard; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -38,7 +41,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -55,191 +57,220 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static 
com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DashboardType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DASHBOARD_KEY_ASPECT_NAME, + DASHBOARD_INFO_ASPECT_NAME, + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); -public class DashboardType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DASHBOARD_KEY_ASPECT_NAME, - DASHBOARD_INFO_ASPECT_NAME, - EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); - - private final EntityClient _entityClient; - - public DashboardType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class inputClass() { - return DashboardUpdateInput.class; - } + public DashboardType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override 
- public EntityType type() { - return EntityType.DASHBOARD; - } + @Override + public Class inputClass() { + return DashboardUpdateInput.class; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.DASHBOARD; + } - @Override - public Class objectClass() { - return Dashboard.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dashboardMap = - _entityClient.batchGetV2( - Constants.DASHBOARD_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dashboardMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDashboard -> gmsDashboard == null ? 
null : DataFetcherResult.newResult() - .data(DashboardMapper.map(gmsDashboard)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Dashboards", e); - } - } + @Override + public Class objectClass() { + return Dashboard.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dashboard", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dashboardMap = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DASHBOARD_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(urnStrs.size()); + for (Urn urn : urns) { + gmsResults.add(dashboardMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDashboard -> + gmsDashboard == null + ? 
null + : DataFetcherResult.newResult() + .data(DashboardMapper.map(context, gmsDashboard)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Dashboards", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dashboard", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "dashboard", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } - private com.linkedin.common.urn.DashboardUrn 
getDashboardUrn(String urnStr) { - try { - return DashboardUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); - } - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dashboard", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(context.getOperationContext(), getDashboardUrn(urn)); + return BrowsePathsMapper.map(context, result); + } - @Override - public Dashboard update(@Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DashboardUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { + try { + return DashboardUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); } + } + + @Override + public Dashboard update( + @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + DashboardUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final DashboardUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DashboardUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 32e4341ece4aac..4fa52b11365641 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InputFields; @@ -14,6 +18,8 @@ import com.linkedin.common.urn.Urn; import 
com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AccessLevel; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.Container; @@ -26,18 +32,21 @@ import com.linkedin.datahub.graphql.types.chart.mappers.InputFieldsMapper; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import 
com.linkedin.entity.EntityResponse; @@ -45,172 +54,246 @@ import com.linkedin.metadata.key.DashboardKey; import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; +public class DashboardMapper implements ModelMapper { + public static final DashboardMapper INSTANCE = new DashboardMapper(); -public class DashboardMapper implements ModelMapper { + public static Dashboard map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - public static final DashboardMapper INSTANCE = new DashboardMapper(); + @Override + public Dashboard apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final Dashboard result = new Dashboard(); + Urn entityUrn = entityResponse.getUrn(); - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DASHBOARD); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapDashboardInfo(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setOwnership( + OwnershipMapper.map(context, new 
Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DashboardMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DashboardMapper::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setInputFields( + InputFieldsMapper.map(context, new InputFields(dataMap), entityUrn))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setBrowsePathV2( + BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + 
STRUCTURED_PROPERTIES_ASPECT_NAME, + ((dashboard, dataMap) -> + dashboard.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { - final Dashboard result = new Dashboard(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DASHBOARD); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - 
mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dashboard.class); + } else { + return mappingHelper.getResult(); } + } - private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final DashboardKey gmsKey = new DashboardKey(dataMap); - dashboard.setDashboardId(gmsKey.getDashboardId()); - dashboard.setTool(gmsKey.getDashboardTool()); - dashboard.setPlatform(DataPlatform.builder() + private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final DashboardKey gmsKey = new DashboardKey(dataMap); + dashboard.setDashboardId(gmsKey.getDashboardId()); + dashboard.setTool(gmsKey.getDashboardTool()); + dashboard.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - 
.setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } - private void mapDashboardInfo(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); - } + private void mapDashboardInfo( + @Nonnull QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap, + Urn entityUrn) { + final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = + new com.linkedin.dashboard.DashboardInfo(dataMap); + dashboard.setInfo(mapInfo(context, gmsDashboardInfo, entityUrn)); + dashboard.setProperties(mapDashboardInfoToProperties(context, gmsDashboardInfo, entityUrn)); + } - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link DashboardInfo} - */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardInfo result = new DashboardInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - result.setCharts(info.getCharts().stream().map(urn -> { - final Chart chart = new Chart(); - chart.setUrn(urn.toString()); - return chart; - }).collect(Collectors.toList())); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link + * DashboardInfo} + */ + private static DashboardInfo mapInfo( + @Nullable final QueryContext context, + final com.linkedin.dashboard.DashboardInfo info, + Urn entityUrn) { + final DashboardInfo result = new DashboardInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + result.setCharts( + info.getCharts().stream() + .map( + urn -> { + final Chart chart = new Chart(); + chart.setUrn(urn.toString()); + return chart; + }) + .collect(Collectors.toList())); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link DashboardProperties} - */ - private DashboardProperties mapDashboardInfoToProperties(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardProperties result = new DashboardProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapEditableDashboardProperties(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(dataMap); - final DashboardEditableProperties dashboardEditableProperties = new DashboardEditableProperties(); - dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); - dashboard.setEditableProperties(dashboardEditableProperties); + if (info.hasAccess()) { + 
result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } + return result; + } - private void mapGlobalTags(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dashboard.setGlobalTags(globalTags); - dashboard.setTags(globalTags); + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link + * DashboardProperties} + */ + private static DashboardProperties mapDashboardInfoToProperties( + @Nullable final QueryContext context, + final com.linkedin.dashboard.DashboardInfo info, + Urn entityUrn) { + final DashboardProperties result = new DashboardProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getDashboardUrl().toString()); + } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); + } + return result; + } + + private void mapEditableDashboardProperties( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(dataMap); + final DashboardEditableProperties dashboardEditableProperties = + new DashboardEditableProperties(); + dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); + dashboard.setEditableProperties(dashboardEditableProperties); + } + + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); + dashboard.setGlobalTags(globalTags); + dashboard.setTags(globalTags); + } - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dashboard.setContainer(Container - .builder() + private static void mapContainers( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + 
dashboard.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); - } - - private void mapSubTypes(@Nonnull Dashboard dashboard, DataMap dataMap) { - SubTypes pegasusSubTypes = new SubTypes(dataMap); - if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); - subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); - dashboard.setSubTypes(subTypes); - } - } + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dashboard.setDomain(DomainAssociationMapper.map(context, domains, dashboard.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index f084dbc0bc09f3..d004fb70d41052 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import 
com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -16,68 +19,71 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; +public class DashboardUpdateInputMapper + implements InputModelMapper, Urn> { + public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); + public static Collection map( + @Nullable final QueryContext context, + @Nonnull final DashboardUpdateInput dashboardUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dashboardUpdateInput, actor); + } -public class DashboardUpdateInputMapper implements - InputModelMapper, Urn> { - public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); - - public static Collection map(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); - } + @Override + public Collection apply( + @Nullable final QueryContext context, + @Nonnull final DashboardUpdateInput dashboardUpdateInput, + @Nonnull final Urn actor) { - @Override - public Collection apply(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - final Collection proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new 
UpdateMappingHelper(DASHBOARD_ENTITY_NAME); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - - if (dashboardUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } - - if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dashboardUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } else { - // Tags override global tags - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dashboardUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(context, dashboardUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dashboardUpdateInput.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); - editableDashboardProperties.setDescription(dashboardUpdateInput.getEditableProperties().getDescription()); - if (!editableDashboardProperties.hasCreated()) { - editableDashboardProperties.setCreated(auditStamp); - } - editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); - } + if (dashboardUpdateInput.getTags() != null || 
dashboardUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (dashboardUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); + } else { + // Tags override global tags + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dashboardUpdateInput.getEditableProperties() != null) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); + editableDashboardProperties.setDescription( + dashboardUpdateInput.getEditableProperties().getDescription()); + if (!editableDashboardProperties.hasCreated()) { + editableDashboardProperties.setCreated(auditStamp); + } + editableDashboardProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index d257aef4be565e..a5abb57672b42b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -1,25 +1,30 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); - public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + public static DashboardUsageMetrics map( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override - public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { + public DashboardUsageMetrics apply( + @Nullable QueryContext context, EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), com.linkedin.dashboard.DashboardUsageStatistics.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + com.linkedin.dashboard.DashboardUsageStatistics.class); final com.linkedin.datahub.graphql.generated.DashboardUsageMetrics dashboardUsageMetrics = new com.linkedin.datahub.graphql.generated.DashboardUsageMetrics(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datacontract/DataContractMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datacontract/DataContractMapper.java new file mode 100644 index 00000000000000..1fe65beed6e927 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datacontract/DataContractMapper.java @@ -0,0 +1,112 @@ +package com.linkedin.datahub.graphql.types.datacontract; + +import com.linkedin.datahub.graphql.generated.Assertion; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.DataContractProperties; +import com.linkedin.datahub.graphql.generated.DataContractState; +import com.linkedin.datahub.graphql.generated.DataContractStatus; +import com.linkedin.datahub.graphql.generated.DataQualityContract; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FreshnessContract; +import com.linkedin.datahub.graphql.generated.SchemaContract; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataContractMapper { + + public static DataContract mapContract(@Nonnull final EntityResponse entityResponse) { + final DataContract result = new DataContract(); + final EnvelopedAspectMap aspects = entityResponse.getAspects(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_CONTRACT); + + final EnvelopedAspect dataContractProperties = + aspects.get(Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME); + if (dataContractProperties != null) { + result.setProperties( + mapProperties( + new com.linkedin.datacontract.DataContractProperties( + dataContractProperties.getValue().data()))); + } else { + throw new RuntimeException( + String.format("Data Contract does not exist!. 
urn: %s", entityResponse.getUrn())); + } + + final EnvelopedAspect dataContractStatus = + aspects.get(Constants.DATA_CONTRACT_STATUS_ASPECT_NAME); + if (dataContractStatus != null) { + result.setStatus( + mapStatus( + new com.linkedin.datacontract.DataContractStatus( + dataContractStatus.getValue().data()))); + } + + return result; + } + + private static DataContractProperties mapProperties( + final com.linkedin.datacontract.DataContractProperties properties) { + final DataContractProperties result = new DataContractProperties(); + result.setEntityUrn(properties.getEntity().toString()); + if (properties.hasSchema()) { + result.setSchema( + properties.getSchema().stream() + .map(DataContractMapper::mapSchemaContract) + .collect(Collectors.toList())); + } + if (properties.hasFreshness()) { + result.setFreshness( + properties.getFreshness().stream() + .map(DataContractMapper::mapFreshnessContract) + .collect(Collectors.toList())); + } + if (properties.hasDataQuality()) { + result.setDataQuality( + properties.getDataQuality().stream() + .map(DataContractMapper::mapDataQualityContract) + .collect(Collectors.toList())); + } + return result; + } + + private static DataContractStatus mapStatus( + final com.linkedin.datacontract.DataContractStatus status) { + final DataContractStatus result = new DataContractStatus(); + result.setState(DataContractState.valueOf(status.getState().toString())); + return result; + } + + private static SchemaContract mapSchemaContract( + final com.linkedin.datacontract.SchemaContract schemaContract) { + final SchemaContract result = new SchemaContract(); + final Assertion partialAssertion = new Assertion(); + partialAssertion.setUrn(schemaContract.getAssertion().toString()); + result.setAssertion(partialAssertion); + return result; + } + + private static FreshnessContract mapFreshnessContract( + final com.linkedin.datacontract.FreshnessContract freshnessContract) { + final FreshnessContract result = new FreshnessContract(); + final Assertion 
partialAssertion = new Assertion(); + partialAssertion.setUrn(freshnessContract.getAssertion().toString()); + result.setAssertion(partialAssertion); + return result; + } + + private static DataQualityContract mapDataQualityContract( + final com.linkedin.datacontract.DataQualityContract qualityContract) { + final DataQualityContract result = new DataQualityContract(); + final Assertion partialAssertion = new Assertion(); + partialAssertion.setUrn(qualityContract.getAssertion().toString()); + result.setAssertion(partialAssertion); + return result; + } + + private DataContractMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datacontract/DataContractType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datacontract/DataContractType.java new file mode 100644 index 00000000000000..7f1756610baf71 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datacontract/DataContractType.java @@ -0,0 +1,84 @@ +package com.linkedin.datahub.graphql.types.datacontract; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataContractType + implements com.linkedin.datahub.graphql.types.EntityType { + + static final Set 
ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_CONTRACT_KEY_ASPECT_NAME, + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, + Constants.DATA_CONTRACT_STATUS_ASPECT_NAME); + private final EntityClient _entityClient; + + public DataContractType(final EntityClient entityClient) { + _entityClient = Objects.requireNonNull(entityClient, "entityClient must not be null"); + } + + @Override + public EntityType type() { + return EntityType.DATA_CONTRACT; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataContract.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataContractUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATA_CONTRACT_ENTITY_NAME, + new HashSet<>(dataContractUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataContractUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataContractMapper.mapContract(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Contracts", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 54f7660064c051..f8248aedf289c0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataflow; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -38,7 +41,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import 
com.linkedin.r2.RemoteInvocationException; @@ -54,178 +56,207 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataFlowType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_FLOW_KEY_ASPECT_NAME, + DATA_FLOW_INFO_ASPECT_NAME, + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); + private final EntityClient _entityClient; -public class DataFlowType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_FLOW_KEY_ASPECT_NAME, - DATA_FLOW_INFO_ASPECT_NAME, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); - private final EntityClient _entityClient; - - public DataFlowType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataFlowType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_FLOW; - } + @Override + public EntityType type() { + return 
EntityType.DATA_FLOW; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return DataFlow.class; - } + @Override + public Class objectClass() { + return DataFlow.class; + } - @Override - public Class inputClass() { - return DataFlowUpdateInput.class; - } + @Override + public Class inputClass() { + return DataFlowUpdateInput.class; + } - @Override - public List> batchLoad(final List urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dataFlowMap = - _entityClient.batchGetV2( - Constants.DATA_FLOW_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataFlowMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataFlow -> gmsDataFlow == null ? 
null : DataFetcherResult.newResult() - .data(DataFlowMapper.map(gmsDataFlow)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Flows", e); - } - } + @Override + public List> batchLoad( + final List urnStrs, @Nonnull final QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dataFlowMap = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATA_FLOW_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dataFlow", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final List gmsResults = new ArrayList<>(urnStrs.size()); + for (Urn urn : urns) { + gmsResults.add(dataFlowMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataFlow -> + gmsDataFlow == null + ? 
null + : DataFetcherResult.newResult() + .data(DataFlowMapper.map(context, gmsDataFlow)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Flows", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "dataFlow", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } - @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "dataFlow", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), "dataFlow", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + context.getOperationContext(), DataFlowUrn.createFromString(urn)); + return BrowsePathsMapper.map(context, result); + } - @Override - public DataFlow update(@Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) throws Exception { + @Override + 
public DataFlow update( + @Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) + throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DataFlowUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + DataFlowUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. 
- final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 719fa9f0b2bf01..9e2612f60abda1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -10,6 +14,8 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import 
com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataFlowEditableProperties; import com.linkedin.datahub.graphql.generated.DataFlowInfo; @@ -17,17 +23,19 @@ import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datajob.EditableDataFlowProperties; import com.linkedin.domain.Domains; @@ -36,122 +44,171 @@ import com.linkedin.metadata.key.DataFlowKey; import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; +import 
com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class DataFlowMapper implements ModelMapper { - public static final DataFlowMapper INSTANCE = new DataFlowMapper(); - - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { - final DataFlow result = new DataFlow(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_FLOW); - Urn entityUrn = entityResponse.getUrn(); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), 
entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - - return mappingHelper.getResult(); + public static final DataFlowMapper INSTANCE = new DataFlowMapper(); + + public static DataFlow map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public DataFlow apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final DataFlow result = new DataFlow(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_FLOW); + Urn entityUrn = entityResponse.getUrn(); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); + mappingHelper.mapToResult( + DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataFlow, dataMap) -> 
dataFlow.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataFlow, dataMap) -> mapGlobalTags(context, dataFlow, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DataFlowMapper::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), DataFlow.class); + } else { + return mappingHelper.getResult(); } - - private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final DataFlowKey gmsKey = new DataFlowKey(dataMap); - 
dataFlow.setOrchestrator(gmsKey.getOrchestrator()); - dataFlow.setFlowId(gmsKey.getFlowId()); - dataFlow.setCluster(gmsKey.getCluster()); - dataFlow.setPlatform(DataPlatform.builder() + } + + private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final DataFlowKey gmsKey = new DataFlowKey(dataMap); + dataFlow.setOrchestrator(gmsKey.getOrchestrator()); + dataFlow.setFlowId(gmsKey.getFlowId()); + dataFlow.setCluster(gmsKey.getCluster()); + dataFlow.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getOrchestrator()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } - - private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = new com.linkedin.datajob.DataFlowInfo(dataMap); - dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); - dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); - } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} - */ - private DataFlowInfo mapDataFlowInfo(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowInfo result = new DataFlowInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getOrchestrator()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { + final 
com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = + new com.linkedin.datajob.DataFlowInfo(dataMap); + dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); + dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} + */ + private DataFlowInfo mapDataFlowInfo( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowInfo result = new DataFlowInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} - */ - private DataFlowProperties mapDataFlowInfoToProperties(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowProperties result = new DataFlowProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(dataMap); - final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); - dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); - dataFlow.setEditableProperties(dataFlowEditableProperties); + return result; + 
} + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} + */ + private DataFlowProperties mapDataFlowInfoToProperties( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowProperties result = new DataFlowProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - private void mapGlobalTags(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataFlow.setGlobalTags(globalTags); - dataFlow.setTags(globalTags); - } - - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } + return result; + } + + private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(dataMap); + final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); + dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); + dataFlow.setEditableProperties(dataFlowEditableProperties); + } + + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull DataFlow dataFlow, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); + dataFlow.setGlobalTags(globalTags); + dataFlow.setTags(globalTags); + } + + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ dataFlow.setDomain(DomainAssociationMapper.map(context, domains, dataFlow.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java index c966fc8338ed49..cb9b6f66c6eabf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataFlowUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -16,21 +19,22 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowUpdateInputMapper implements InputModelMapper, Urn> { +public class DataFlowUpdateInputMapper + implements InputModelMapper, Urn> { public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper(); - public static Collection map(@Nonnull final DataFlowUpdateInput dataFlowUpdateInput, + public static Collection map( + @Nullable final QueryContext context, + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dataFlowUpdateInput, 
actor); + return INSTANCE.apply(context, dataFlowUpdateInput, actor); } @Override public Collection apply( + @Nullable final QueryContext context, @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); @@ -41,7 +45,8 @@ public Collection apply( if (dataFlowUpdateInput.getOwnership() != null) { proposals.add( - updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(context, dataFlowUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -50,28 +55,29 @@ public Collection apply( if (dataFlowUpdateInput.getGlobalTags() != null) { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getGlobalTags().getTags().stream() + .map(t -> TagAssociationUpdateMapper.map(context, t)) + .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getTags().getTags().stream() + .map(t -> TagAssociationUpdateMapper.map(context, t)) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (dataFlowUpdateInput.getEditableProperties() != null) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(); - editableDataFlowProperties.setDescription(dataFlowUpdateInput.getEditableProperties().getDescription()); + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(); + editableDataFlowProperties.setDescription( + dataFlowUpdateInput.getEditableProperties().getDescription()); 
editableDataFlowProperties.setCreated(auditStamp); editableDataFlowProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataFlowProperties, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataFlowProperties, EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index bde79f6dce6e8c..1e1de615b5911b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.datajob; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -38,7 +41,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; 
import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,177 +56,208 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataJobType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_JOB_KEY_ASPECT_NAME, + DATA_JOB_INFO_ASPECT_NAME, + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("flow"); + private final EntityClient _entityClient; -public class DataJobType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_JOB_KEY_ASPECT_NAME, - DATA_JOB_INFO_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("flow"); - private final EntityClient _entityClient; - - public DataJobType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataJobType(final EntityClient entityClient) { + 
_entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_JOB; - } + @Override + public EntityType type() { + return EntityType.DATA_JOB; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return DataJob.class; - } + @Override + public Class objectClass() { + return DataJob.class; + } - @Override - public Class inputClass() { - return DataJobUpdateInput.class; - } + @Override + public Class inputClass() { + return DataJobUpdateInput.class; + } - @Override - public List> batchLoad(final List urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dataJobMap = _entityClient.batchGetV2( - Constants.DATA_JOB_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataJobMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataJob -> gmsDataJob == null ? 
null : DataFetcherResult.newResult() - .data(DataJobMapper.map(gmsDataJob)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Jobs", e); - } - } + @Override + public List> batchLoad( + final List urnStrs, @Nonnull final QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "dataJob", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + final Map dataJobMap = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATA_JOB_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(urnStrs.size()); + for (Urn urn : urns) { + gmsResults.add(dataJobMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataJob -> + gmsDataJob == null + ? 
null + : DataFetcherResult.newResult() + .data(DataJobMapper.map(context, gmsDataJob)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Jobs", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), "dataJob", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(context.getOperationContext(), "dataJob", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } - @Override - public DataJob 
update(@Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DataJobUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataJob", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + context.getOperationContext(), DataJobUrn.createFromString(urn)); + return BrowsePathsMapper.map(context, result); + } + + @Override + public DataJob update( + @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + DataJobUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 4845fc18763488..d7da875bc2a29f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -1,16 +1,23 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import 
com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataJob; import com.linkedin.datahub.graphql.generated.DataJobEditableProperties; @@ -20,161 +27,204 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.FineGrainedLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datajob.EditableDataJobProperties; import com.linkedin.domain.Domains; import 
com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.key.DataJobKey; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class DataJobMapper implements ModelMapper { - public static final DataJobMapper INSTANCE = new DataJobMapper(); + public static final DataJobMapper INSTANCE = new DataJobMapper(); - public static DataJob map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static DataJob map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - @Override - public DataJob apply(@Nonnull final EntityResponse entityResponse) { - final DataJob result = new DataJob(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public DataJob apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final DataJob result = new DataJob(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_JOB); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_JOB); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - entityResponse.getAspects().forEach((name, aspect) -> { - DataMap data = aspect.getValue().data(); - if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { + entityResponse + .getAspects() + .forEach( + (name, aspect) -> { + DataMap data = aspect.getValue().data(); + if 
(DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow(new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + if (context == null || canView(context.getOperationContext(), gmsKey.getFlow())) { + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + } result.setJobId(gmsKey.getJobId()); - } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = new com.linkedin.datajob.DataJobInfo(data); + } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = + new com.linkedin.datajob.DataJobInfo(data); result.setInfo(mapDataJobInfo(gmsDataJobInfo, entityUrn)); result.setProperties(mapDataJobInfoToProperties(gmsDataJobInfo, entityUrn)); - } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = new com.linkedin.datajob.DataJobInputOutput(data); + } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = + new com.linkedin.datajob.DataJobInputOutput(data); result.setInputOutput(mapDataJobInputOutput(gmsDataJobInputOutput)); - } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(data); - final DataJobEditableProperties dataJobEditableProperties = new DataJobEditableProperties(); - dataJobEditableProperties.setDescription(editableDataJobProperties.getDescription()); + } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { + final EditableDataJobProperties editableDataJobProperties = + new EditableDataJobProperties(data); + final DataJobEditableProperties dataJobEditableProperties = + new DataJobEditableProperties(); + dataJobEditableProperties.setDescription( + 
editableDataJobProperties.getDescription()); result.setEditableProperties(dataJobEditableProperties); - } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { - result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); - } else if (STATUS_ASPECT_NAME.equals(name)) { - result.setStatus(StatusMapper.map(new Status(data))); - } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { + result.setOwnership(OwnershipMapper.map(context, new Ownership(data), entityUrn)); + } else if (STATUS_ASPECT_NAME.equals(name)) { + result.setStatus(StatusMapper.map(context, new Status(data))); + } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); - } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); - } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); - } else if (DOMAINS_ASPECT_NAME.equals(name)) { + } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(data), entityUrn)); + } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(data), entityUrn)); + } else if (DOMAINS_ASPECT_NAME.equals(name)) { final Domains domains = new Domains(data); // Currently we only take the first domain if it exists. 
- result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); - } else if (DEPRECATION_ASPECT_NAME.equals(name)) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); - } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); - } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { - result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); - } - }); - - return result; + result.setDomain( + DomainAssociationMapper.map(context, domains, entityUrn.toString())); + } else if (DEPRECATION_ASPECT_NAME.equals(name)) { + result.setDeprecation(DeprecationMapper.map(context, new Deprecation(data))); + } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); + } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { + result.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(data))); + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + result.setSubTypes(SubTypesMapper.map(context, new SubTypes(data))); + } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(name)) { + result.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(data))); + } else if (FORMS_ASPECT_NAME.equals(name)) { + result.setForms(FormsMapper.map(new Forms(data), entityUrn.toString())); + } + }); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, DataJob.class); + } else { + return result; } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} - */ - private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobInfo result = new DataJobInfo(); - result.setName(info.getName()); - 
result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ + private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobInfo result = new DataJobInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} - */ - private DataJobProperties mapDataJobInfoToProperties(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobProperties result = new DataJobProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} */ + private DataJobProperties mapDataJobInfoToProperties( + final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobProperties result = new DataJobProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } + if (info.hasCustomProperties()) { + 
result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private DataJobInputOutput mapDataJobInputOutput( + final com.linkedin.datajob.DataJobInputOutput inputOutput) { + final DataJobInputOutput result = new DataJobInputOutput(); + if (inputOutput.hasInputDatasets()) { + result.setInputDatasets( + inputOutput.getInputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatasets(ImmutableList.of()); + } + if (inputOutput.hasOutputDatasets()) { + result.setOutputDatasets( + inputOutput.getOutputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setOutputDatasets(ImmutableList.of()); + } + if (inputOutput.hasInputDatajobs()) { + result.setInputDatajobs( + inputOutput.getInputDatajobs().stream() + .map( + urn -> { + final DataJob dataJob = new DataJob(); + dataJob.setUrn(urn.toString()); + return dataJob; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatajobs(ImmutableList.of()); } - private DataJobInputOutput mapDataJobInputOutput(final com.linkedin.datajob.DataJobInputOutput inputOutput) { - final DataJobInputOutput result = new DataJobInputOutput(); - if (inputOutput.hasInputDatasets()) { - result.setInputDatasets(inputOutput.getInputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setInputDatasets(ImmutableList.of()); - } - if (inputOutput.hasOutputDatasets()) { - result.setOutputDatasets(inputOutput.getOutputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - 
}).collect(Collectors.toList())); - } else { - result.setOutputDatasets(ImmutableList.of()); - } - if (inputOutput.hasInputDatajobs()) { - result.setInputDatajobs(inputOutput.getInputDatajobs().stream().map(urn -> { - final DataJob dataJob = new DataJob(); - dataJob.setUrn(urn.toString()); - return dataJob; - }).collect(Collectors.toList())); - } else { - result.setInputDatajobs(ImmutableList.of()); - } - - if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { - result.setFineGrainedLineages(FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); - } - - return result; + if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { + result.setFineGrainedLineages( + FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b075c42d411fb3..a1d0123d3a5211 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataJobUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -16,64 +19,67 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobUpdateInputMapper implements InputModelMapper, Urn> { - public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); +public class DataJobUpdateInputMapper + implements InputModelMapper, Urn> { + public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); - public static Collection map( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dataJobUpdateInput, actor); - } - - @Override - public Collection apply( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - final Collection proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); + public static Collection map( + @Nullable final QueryContext context, + @Nonnull final DataJobUpdateInput dataJobUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dataJobUpdateInput, actor); + } - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); + @Override + public Collection apply( + @Nullable final QueryContext context, + @Nonnull final DataJobUpdateInput dataJobUpdateInput, + @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); - if (dataJobUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } + final AuditStamp auditStamp = new 
AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dataJobUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } else { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dataJobUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(context, dataJobUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dataJobUpdateInput.getEditableProperties() != null) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); - editableDataJobProperties.setDescription(dataJobUpdateInput.getEditableProperties().getDescription()); - editableDataJobProperties.setCreated(auditStamp); - editableDataJobProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataJobProperties, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); - } + if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (dataJobUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getGlobalTags().getTags().stream() + .map(t -> TagAssociationUpdateMapper.map(context, t)) + .collect(Collectors.toList()))); + } else { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getTags().getTags().stream() + 
.map(t -> TagAssociationUpdateMapper.map(context, t)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dataJobUpdateInput.getEditableProperties() != null) { + final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); + editableDataJobProperties.setDescription( + dataJobUpdateInput.getEditableProperties().getDescription()); + editableDataJobProperties.setCreated(auditStamp); + editableDataJobProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataJobProperties, EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); } + + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java index 57a035d136645c..921b1ab3b5edd1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatform; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,56 +19,60 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformType implements EntityType { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public DataPlatformType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataPlatformType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public Class 
objectClass() { - return DataPlatform.class; - } + @Override + public Class objectClass() { + return DataPlatform.class; + } - @Override - public List> batchLoad(final List urns, final QueryContext context) { + @Override + public List> batchLoad( + final List urns, final QueryContext context) { - final List dataPlatformUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final List dataPlatformUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map dataPlatformMap = _entityClient.batchGetV2( - DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), null, context.getAuthentication()); + try { + final Map dataPlatformMap = + _entityClient.batchGetV2( + context.getOperationContext(), + DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + null); - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformUrns) { - gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformUrns) { + gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); + } - return gmsResults.stream() - .map(gmsPlatform -> gmsPlatform == null ? null - : DataFetcherResult.newResult() - .data(DataPlatformMapper.map(gmsPlatform)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Platforms", e); - } + return gmsResults.stream() + .map( + gmsPlatform -> + gmsPlatform == null + ? 
null + : DataFetcherResult.newResult() + .data(DataPlatformMapper.map(context, gmsPlatform)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Platforms", e); } + } - @Override - public com.linkedin.datahub.graphql.generated.EntityType type() { - return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; - } + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java index 011fb83cddb334..a7c765f5dcbf68 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java @@ -1,30 +1,37 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformInfo; import com.linkedin.datahub.graphql.generated.PlatformType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Deprecated -public class DataPlatformInfoMapper implements ModelMapper { +public class DataPlatformInfoMapper + implements ModelMapper { - public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); + public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); - public static DataPlatformInfo 
map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); - } + public static DataPlatformInfo map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(context, platform); + } - @Override - public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformInfo result = new DataPlatformInfo(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.hasDisplayName()) { - result.setDisplayName(input.getDisplayName()); - } - if (input.hasLogoUrl()) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + @Override + public DataPlatformInfo apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformInfo result = new DataPlatformInfo(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.hasDisplayName()) { + result.setDisplayName(input.getDisplayName()); + } + if (input.hasLogoUrl()) { + result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java index 8df44e8f6e9e98..df3fc7fb6434e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import static com.linkedin.metadata.Constants.*; + import 
com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -12,37 +15,44 @@ import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class DataPlatformMapper implements ModelMapper { - public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); - - public static DataPlatform map(@Nonnull final EntityResponse platform) { - return INSTANCE.apply(platform); - } - - @Override - public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { - final DataPlatform result = new DataPlatform(); - final DataPlatformKey dataPlatformKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKeyInternal(entityResponse.getUrn(), - new DataPlatformKey().schema()); - result.setType(EntityType.DATA_PLATFORM); - Urn urn = entityResponse.getUrn(); - result.setUrn(urn.toString()); - result.setName(dataPlatformKey.getPlatformName()); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PLATFORM_KEY_ASPECT_NAME, (dataPlatform, dataMap) -> + public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); + + public static DataPlatform map( + @Nullable QueryContext context, @Nonnull final EntityResponse platform) { + return INSTANCE.apply(context, platform); + } + + @Override + public DataPlatform apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final DataPlatform result = new DataPlatform(); + final 
DataPlatformKey dataPlatformKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + entityResponse.getUrn(), new DataPlatformKey().schema()); + result.setType(EntityType.DATA_PLATFORM); + Urn urn = entityResponse.getUrn(); + result.setUrn(urn.toString()); + result.setName(dataPlatformKey.getPlatformName()); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PLATFORM_KEY_ASPECT_NAME, + (dataPlatform, dataMap) -> dataPlatform.setName(new DataPlatformKey(dataMap).getPlatformName())); - mappingHelper.mapToResult(DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) -> - dataPlatform.setProperties(DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + DATA_PLATFORM_INFO_ASPECT_NAME, + (dataPlatform, dataMap) -> + dataPlatform.setProperties( + DataPlatformPropertiesMapper.map(context, new DataPlatformInfo(dataMap)))); + return mappingHelper.getResult(); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java index c0a236dc1a4025..0043ad65ee5db9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java @@ -1,31 +1,36 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformProperties; import 
com.linkedin.datahub.graphql.generated.PlatformType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +public class DataPlatformPropertiesMapper + implements ModelMapper { -public class DataPlatformPropertiesMapper implements ModelMapper { + public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper(); - public static final DataPlatformPropertiesMapper - INSTANCE = new DataPlatformPropertiesMapper(); + public static DataPlatformProperties map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(context, platform); + } - public static DataPlatformProperties map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + @Override + public DataPlatformProperties apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformProperties result = new DataPlatformProperties(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public DataPlatformProperties apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformProperties result = new DataPlatformProperties(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getLogoUrl() != null) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + if (input.getLogoUrl() != null) { + result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index 87614e13325283..7a5a88ae3a30ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,19 +12,15 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.datahub.graphql.types.dataplatforminstance.mappers.DataPlatformInstanceMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; -import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,90 +28,104 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; -import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; - -public class DataPlatformInstanceType implements SearchableEntityType, +public class DataPlatformInstanceType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME - ); - private final EntityClient _entityClient; - - public DataPlatformInstanceType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.DATA_PLATFORM_INSTANCE; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return DataPlatformInstance.class; + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME); + private final EntityClient _entityClient; + + public DataPlatformInstanceType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.DATA_PLATFORM_INSTANCE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataPlatformInstance.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception 
{ + final List dataPlatformInstanceUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + new HashSet<>(dataPlatformInstanceUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataPlatformInstanceMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + + } catch (Exception e) { + throw new RuntimeException("Failed to batch load DataPlatformInstance", e); } - - @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List dataPlatformInstanceUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, - new HashSet<>(dataPlatformInstanceUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataPlatformInstanceMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - - } catch (Exception e) { - throw new RuntimeException("Failed to batch load DataPlatformInstance", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, - filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), + DATA_PLATFORM_INSTANCE_ENTITY_NAME, + query, + filters, + limit); + return AutoCompleteResultsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java index 
ba49f23133f9e2..ed9bf0c82d869b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java @@ -1,39 +1,42 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance.mappers; -import com.linkedin.common.Ownership; +import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; +import com.linkedin.common.Ownership; import com.linkedin.common.Status; -import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; -import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; -import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; +import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; 
+import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataPlatformInstanceKey; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformInstanceMapper { public static final DataPlatformInstanceMapper INSTANCE = new DataPlatformInstanceMapper(); - public static DataPlatformInstance map(final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataPlatformInstance map( + @Nullable QueryContext context, final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } - public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) { + public DataPlatformInstance apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataPlatformInstance result = new DataPlatformInstance(); final Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityUrn.toString()); @@ -41,65 +44,78 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) final EnvelopedAspectMap aspects = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspects, result); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - this::mapDataPlatformInstanceKey - ); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.OWNERSHIP_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.GLOBAL_TAGS_ASPECT_NAME, - 
(dataPlatformInstance, dataMap) -> this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.STATUS_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap))) - ); - mappingHelper.mapToResult(Constants.DEPRECATION_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))) - ); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, this::mapDataPlatformInstanceKey); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.OWNERSHIP_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.GLOBAL_TAGS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapGlobalTags(context, dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.STATUS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + Constants.DEPRECATION_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setDeprecation( + 
DeprecationMapper.map(context, new Deprecation(dataMap)))); return mappingHelper.getResult(); } - private void mapDataPlatformInstanceKey(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { + private void mapDataPlatformInstanceKey( + @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { final DataPlatformInstanceKey gmsKey = new DataPlatformInstanceKey(dataMap); - dataPlatformInstance.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()) - .build()); + dataPlatformInstance.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); dataPlatformInstance.setInstanceId(gmsKey.getInstance()); } private void mapDataPlatformInstanceProperties( - @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn - ) { - final DataPlatformInstanceProperties gmsProperties = new DataPlatformInstanceProperties(dataMap); + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + final DataPlatformInstanceProperties gmsProperties = + new DataPlatformInstanceProperties(dataMap); final com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties properties = - new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); + new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); properties.setName(gmsProperties.getName()); properties.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } 
dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private static void mapGlobalTags( + @Nullable QueryContext context, + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index ee014f9f665719..7a4d342281fe54 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; @@ -11,44 +14,47 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} 
objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class DataProcessInstanceMapper implements ModelMapper { - public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); - - public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); + + public static DataProcessInstance map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public DataProcessInstance apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final DataProcessInstance result = new DataProcessInstance(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_PROCESS_INSTANCE); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + context, DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + + return mappingHelper.getResult(); + } + + private void mapDataProcessProperties( + @Nonnull QueryContext context, @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + DataProcessInstanceProperties dataProcessInstanceProperties = + new DataProcessInstanceProperties(dataMap); + dpi.setName(dataProcessInstanceProperties.getName()); + if (dataProcessInstanceProperties.hasCreated()) { + dpi.setCreated(AuditStampMapper.map(context, dataProcessInstanceProperties.getCreated())); } - - @Override - public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { - final DataProcessInstance result = new DataProcessInstance(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_PROCESS_INSTANCE); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - 
MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); - - return mappingHelper.getResult(); - } - - private void mapDataProcessProperties(@Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { - DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); - dpi.setName(dataProcessInstanceProperties.getName()); - if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); - } - if (dataProcessInstanceProperties.hasExternalUrl()) { - dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); - } + if (dataProcessInstanceProperties.hasExternalUrl()) { + dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index ca9a77f7e45cb8..3c8639c07c0360 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -1,41 +1,51 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.dataprocess.DataProcessInstanceRunEvent; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - - -public class DataProcessInstanceRunEventMapper implements 
TimeSeriesAspectMapper { - - public static final DataProcessInstanceRunEventMapper INSTANCE = new DataProcessInstanceRunEventMapper(); - - public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); +import javax.annotation.Nullable; + +public class DataProcessInstanceRunEventMapper + implements TimeSeriesAspectMapper { + + public static final DataProcessInstanceRunEventMapper INSTANCE = + new DataProcessInstanceRunEventMapper(); + + public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + + DataProcessInstanceRunEvent runEvent = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + DataProcessInstanceRunEvent.class); + + final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = + new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); + + result.setTimestampMillis(runEvent.getTimestampMillis()); + result.setAttempt(runEvent.getAttempt()); + if (runEvent.hasStatus()) { + result.setStatus( + com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf( + runEvent.getStatus().toString())); } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - DataProcessInstanceRunEvent runEvent = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - DataProcessInstanceRunEvent.class); - - final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = - new 
com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); - - result.setTimestampMillis(runEvent.getTimestampMillis()); - result.setAttempt(runEvent.getAttempt()); - if (runEvent.hasStatus()) { - result.setStatus(com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf(runEvent.getStatus().toString())); - } - if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); - } - - return result; + if (runEvent.hasResult()) { + result.setResult(DataProcessInstanceRunResultMapper.map(context, runEvent.getResult())); } + if (runEvent.hasDurationMillis()) { + result.setDurationMillis(runEvent.getDurationMillis()); + } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 91b03eea2745f2..7026856503a0bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -1,34 +1,40 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResultType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +public class DataProcessInstanceRunResultMapper + implements ModelMapper< + DataProcessInstanceRunResult, + com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { -public class DataProcessInstanceRunResultMapper implements ModelMapper< - 
DataProcessInstanceRunResult, com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { + public static final DataProcessInstanceRunResultMapper INSTANCE = + new DataProcessInstanceRunResultMapper(); - public static final DataProcessInstanceRunResultMapper INSTANCE = new DataProcessInstanceRunResultMapper(); + public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( + @Nullable QueryContext context, @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(context, input); + } - public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map(@Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); - } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply(@Nonnull final DataProcessInstanceRunResult input) { - - final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = - new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); + @Override + public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( + @Nullable QueryContext context, @Nonnull final DataProcessInstanceRunResult input) { - if (input.hasType()) { - result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); - } + final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = + new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); - if (input.hasNativeResultType()) { - result.setNativeResultType(input.getNativeResultType()); - } + if (input.hasType()) { + result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); + } - return result; + if (input.hasNativeResultType()) { + result.setNativeResultType(input.getNativeResultType()); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index eb8ca23f00b37a..8463f2e05dfc90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -1,5 +1,15 @@ package com.linkedin.datahub.graphql.types.dataproduct; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,11 +28,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import lombok.RequiredArgsConstructor; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,26 +35,25 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import 
static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import org.apache.commons.lang3.NotImplementedException; @RequiredArgsConstructor -public class DataProductType implements SearchableEntityType, - com.linkedin.datahub.graphql.types.EntityType { - public static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME - ); +public class DataProductType + implements SearchableEntityType, + com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -68,22 +72,31 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { - final List dataProductUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataProductUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATA_PRODUCT_ENTITY_NAME, new HashSet<>(dataProductUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + 
DATA_PRODUCT_ENTITY_NAME, + new HashSet<>(dataProductUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : dataProductUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataProductMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataProductMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); @@ -91,22 +104,28 @@ public List> batchLoad(@Nonnull List urns } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), DATA_PRODUCT_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Data Product entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Data Product entity type"); } - } diff 
--git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 254b43ecb96ccb..08637dbfd01edc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,5 +1,16 @@ package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -7,6 +18,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; @@ -14,34 +27,31 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; - +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - +import javax.annotation.Nullable; public class DataProductMapper implements ModelMapper { public static final DataProductMapper INSTANCE = new DataProductMapper(); - public static DataProduct map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataProduct map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataProduct apply(@Nonnull final EntityResponse entityResponse) { + public DataProduct apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataProduct result = new DataProduct(); Urn entityUrn = entityResponse.getUrn(); @@ -50,27 +60,59 
@@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, (dataProduct, dataMap) -> - mapDataProductProperties(dataProduct, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setDomain(DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + (dataProduct, dataMap) -> mapDataProductProperties(dataProduct, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setTags(GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + DOMAINS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setDomain( + DomainAssociationMapper.map(context, new Domains(dataMap), dataProduct.getUrn()))); + mappingHelper.mapToResult( + 
OWNERSHIP_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return result; + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, DataProduct.class); + } else { + return result; + } } - private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { + private void mapDataProductProperties( + @Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { DataProductProperties dataProductProperties = new DataProductProperties(dataMap); - com.linkedin.datahub.graphql.generated.DataProductProperties properties = new com.linkedin.datahub.graphql.generated.DataProductProperties(); + com.linkedin.datahub.graphql.generated.DataProductProperties properties = + new com.linkedin.datahub.graphql.generated.DataProductProperties(); - final String name = dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); + final String name = + dataProductProperties.hasName() ? 
dataProductProperties.getName() : urn.getId(); properties.setName(name); properties.setDescription(dataProductProperties.getDescription()); if (dataProductProperties.hasExternalUrl()) { @@ -81,7 +123,9 @@ private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull } else { properties.setNumAssets(0); } - properties.setCustomProperties(CustomPropertiesMapper.map(dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); + properties.setCustomProperties( + CustomPropertiesMapper.map( + dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); dataProduct.setProperties(properties); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 0fc4399ac902d7..46c810ac00d621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -8,19 +13,17 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; 
-import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; -import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.BrowsableEntityType; @@ -38,12 +41,10 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -56,234 +57,272 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetType implements SearchableEntityType, BrowsableEntityType, +public class DatasetType + implements SearchableEntityType, + BrowsableEntityType, BatchMutableType 
{ - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME - ); - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private static final String ENTITY_NAME = "dataset"; - - private final EntityClient _entityClient; - - public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return Dataset.class; + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + ACCESS_DATASET_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private static final String ENTITY_NAME = "dataset"; + + private final EntityClient entityClient; + + public DatasetType(final EntityClient entityClient) { + this.entityClient = entityClient; + } + + @Override + public Class objectClass() { + return Dataset.class; + } + + @Override + public Class inputClass() { + return DatasetUpdateInput.class; + } + + @Override + public Class batchInputClass() { + return BatchDatasetUpdateInput[].class; + } + + @Override + public EntityType type() { + return EntityType.DATASET; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + @Nonnull final List urnStrs, @Nonnull final QueryContext context) { + try { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map datasetMap = + entityClient.batchGetV2( + context.getOperationContext(), + Constants.DATASET_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE); + + final List gmsResults = new ArrayList<>(urnStrs.size()); + for (Urn urn : urns) { + gmsResults.add(datasetMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.newResult() + .data(DatasetMapper.map(context, gmsDataset)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); } - - @Override - public Class inputClass() { - return DatasetUpdateInput.class; - } - - @Override - public Class batchInputClass() { - return BatchDatasetUpdateInput[].class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ENTITY_NAME, + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "dataset", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + entityClient.getBrowsePaths(context.getOperationContext(), DatasetUtils.getDatasetUrn(urn)); + return BrowsePathsMapper.map(context, result); + } + + @Override + public List batchUpdate( + @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { + final Urn actor = Urn.createFromString(context.getActorUrn()); + + final Collection proposals = + Arrays.stream(input) + .map( + updateInput -> { + if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { + Collection datasetProposals = + DatasetUpdateInputMapper.map(context, updateInput.getUpdate(), actor); + datasetProposals.forEach( + proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); + return datasetProposals; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + + final List urns = + Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); + + try { + entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } - @Override - public EntityType type() { - return EntityType.DATASET; + return batchLoad(urns, context).stream() + .map(DataFetcherResult::getData) + .collect(Collectors.toList()); + } + + @Override + public Dataset update( + @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + DatasetUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + + try { + entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public List> batchLoad(@Nonnull final List urnStrs, - @Nonnull final QueryContext context) { - try { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map datasetMap = - _entityClient.batchGetV2( - Constants.DATASET_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(datasetMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataset -> 
gmsDataset == null ? null : DataFetcherResult.newResult() - .data(DatasetMapper.map(gmsDataset)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getInstitutionalMemory() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "dataset", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); + if (updateInput.getDeprecation() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); } - - @Override - public List batchUpdate(@Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); - - final Collection proposals = Arrays.stream(input).map(updateInput -> { - if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { - Collection datasetProposals = DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); - datasetProposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); - return datasetProposals; - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }).flatMap(Collection::stream).collect(Collectors.toList()); - - final List urns = Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); - } - - return batchLoad(urns, context).stream().map(DataFetcherResult::getData).collect(Collectors.toList()); + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - - @Override - public Dataset update(@Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DatasetUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (updateInput.getEditableSchemaMetadata() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); } - private boolean isAuthorized(@Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. 
- final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getInstitutionalMemory() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); - } - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDeprecation() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - if (updateInput.getEditableSchemaMetadata() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java index e1aa580276a504..676617bfa2f90d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.types.dataset; import com.linkedin.common.urn.DatasetUrn; - import java.net.URISyntaxException; public class DatasetUtils { - private DatasetUtils() { } + private DatasetUtils() {} - static DatasetUrn getDatasetUrn(String urnStr) { - try { - return DatasetUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); - } + static DatasetUrn getDatasetUrn(String urnStr) { + try { + return DatasetUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index e620bfb30b6b7e..05a9d77bb67e2a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -22,32 +24,30 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class VersionedDatasetType implements com.linkedin.datahub.graphql.types.EntityType { +public class VersionedDatasetType + implements com.linkedin.datahub.graphql.types.EntityType { - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME - ); + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; @@ -74,24 +74,28 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(@Nonnull final List versionedUrns, - @Nonnull final QueryContext context) { + public List> batchLoad( + @Nonnull final List versionedUrns, @Nonnull final QueryContext context) { try { final Map datasetMap = _entityClient.batchGetVersionedV2( + context.getOperationContext(), Constants.DATASET_ENTITY_NAME, new HashSet<>(versionedUrns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); + ASPECTS_TO_RESOLVE); final List gmsResults = new ArrayList<>(); for (VersionedUrn versionedUrn : versionedUrns) { gmsResults.add(datasetMap.getOrDefault(versionedUrn.getUrn(), null)); } return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? null : DataFetcherResult.newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) - .build()) + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.newResult() + .data(VersionedDatasetMapper.map(context, gmsDataset)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Datasets", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 0ec9bed0c85117..89e636e6066012 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.assertion.AssertionRunEvent; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AssertionResult; import com.linkedin.datahub.graphql.generated.AssertionResultType; import com.linkedin.datahub.graphql.generated.AssertionRunStatus; @@ -12,7 +13,7 @@ import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class AssertionRunEventMapper implements TimeSeriesAspectMapper { @@ -20,37 +21,42 @@ public class AssertionRunEventMapper public static final AssertionRunEventMapper INSTANCE = new AssertionRunEventMapper(); public static com.linkedin.datahub.graphql.generated.AssertionRunEvent map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable 
QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), AssertionRunEvent.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + AssertionRunEvent.class); final com.linkedin.datahub.graphql.generated.AssertionRunEvent assertionRunEvent = new com.linkedin.datahub.graphql.generated.AssertionRunEvent(); + assertionRunEvent.setLastObservedMillis(envelopedAspect.getSystemMetadata().getLastObserved()); assertionRunEvent.setTimestampMillis(gmsAssertionRunEvent.getTimestampMillis()); assertionRunEvent.setAssertionUrn(gmsAssertionRunEvent.getAssertionUrn().toString()); assertionRunEvent.setAsserteeUrn(gmsAssertionRunEvent.getAsserteeUrn().toString()); assertionRunEvent.setRunId(gmsAssertionRunEvent.getRunId()); - assertionRunEvent.setStatus(AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); + assertionRunEvent.setStatus( + AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { - assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); + assertionRunEvent.setBatchSpec(mapBatchSpec(context, gmsAssertionRunEvent.getBatchSpec())); } if (gmsAssertionRunEvent.hasPartitionSpec()) { assertionRunEvent.setPartitionSpec(mapPartitionSpec(gmsAssertionRunEvent.getPartitionSpec())); } if (gmsAssertionRunEvent.hasResult()) { - assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); + assertionRunEvent.setResult(mapAssertionResult(context, gmsAssertionRunEvent.getResult())); } if (gmsAssertionRunEvent.hasRuntimeContext()) { - assertionRunEvent.setRuntimeContext(StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + assertionRunEvent.setRuntimeContext( + StringMapMapper.map(context, 
gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; @@ -63,7 +69,8 @@ private PartitionSpec mapPartitionSpec(com.linkedin.timeseries.PartitionSpec gms return partitionSpec; } - private AssertionResult mapAssertionResult(com.linkedin.assertion.AssertionResult gmsResult) { + private AssertionResult mapAssertionResult( + @Nullable QueryContext context, com.linkedin.assertion.AssertionResult gmsResult) { AssertionResult datasetAssertionResult = new AssertionResult(); datasetAssertionResult.setRowCount(gmsResult.getRowCount()); datasetAssertionResult.setActualAggValue(gmsResult.getActualAggValue()); @@ -76,18 +83,20 @@ private AssertionResult mapAssertionResult(com.linkedin.assertion.AssertionResul } if (gmsResult.hasNativeResults()) { - datasetAssertionResult.setNativeResults(StringMapMapper.map(gmsResult.getNativeResults())); + datasetAssertionResult.setNativeResults( + StringMapMapper.map(context, gmsResult.getNativeResults())); } return datasetAssertionResult; } - private BatchSpec mapBatchSpec(com.linkedin.assertion.BatchSpec gmsBatchSpec) { + private BatchSpec mapBatchSpec( + @Nullable QueryContext context, com.linkedin.assertion.BatchSpec gmsBatchSpec) { BatchSpec batchSpec = new BatchSpec(); batchSpec.setNativeBatchId(gmsBatchSpec.getNativeBatchId()); batchSpec.setLimit(gmsBatchSpec.getLimit()); batchSpec.setQuery(gmsBatchSpec.getQuery()); - batchSpec.setCustomProperties(StringMapMapper.map(gmsBatchSpec.getCustomProperties())); + batchSpec.setCustomProperties(StringMapMapper.map(context, gmsBatchSpec.getCustomProperties())); return batchSpec; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1adcea7e53dc27..a4b076f8c8bf22 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -1,25 +1,31 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class DatasetDeprecationMapper implements ModelMapper { +public class DatasetDeprecationMapper + implements ModelMapper { - public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); + public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { + return INSTANCE.apply(context, deprecation); + } - @Override - public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java new file mode 100644 index 00000000000000..de715f28ef7833 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java @@ -0,0 +1,28 @@ +package com.linkedin.datahub.graphql.types.dataset.mappers; + +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DatasetFilter; +import com.linkedin.datahub.graphql.generated.DatasetFilterType; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class DatasetFilterMapper + implements ModelMapper { + + public static final DatasetFilterMapper INSTANCE = new DatasetFilterMapper(); + + public static DatasetFilter map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetFilter metadata) { + return INSTANCE.apply(context, metadata); + } + + @Override + public DatasetFilter apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetFilter input) { + final DatasetFilter result = new DatasetFilter(); + result.setType(DatasetFilterType.valueOf(input.getType().name())); + result.setSql(input.getSql()); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 4867aa1d89825f..a7b5f6de0c183d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,19 +1,27 @@ package 
com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Access; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.Embed; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.Siblings; import com.linkedin.common.Status; +import com.linkedin.common.SubTypes; import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; @@ -21,6 +29,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; @@ -28,14 +37,16 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import 
com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -48,156 +59,238 @@ import com.linkedin.metadata.key.DatasetKey; import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ @Slf4j public class DatasetMapper implements ModelMapper { - public static final DatasetMapper INSTANCE = new DatasetMapper(); + public static final DatasetMapper INSTANCE = new DatasetMapper(); - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); - } + public static Dataset map( + @Nullable final QueryContext context, @Nonnull final EntityResponse dataset) { + return INSTANCE.apply(context, dataset); + } + + public Dataset apply(@Nonnull final EntityResponse entityResponse) { + return apply(null, entityResponse); + } + + public Dataset apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + Dataset result = new Dataset(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATASET); - public Dataset apply(@Nonnull final EntityResponse entityResponse) { - Dataset result = new Dataset(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATASET); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); - 
mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); - mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(SIBLINGS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); - mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); - 
mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> - dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> - dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); - return mappingHelper.getResult(); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation( + DatasetDeprecationMapper.map(context, new DatasetDeprecation(dataMap)))); + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSchema(SchemaMapper.map(context, new SchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, 
new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map( + context, new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DatasetMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DatasetMapper::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + SIBLINGS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSiblings(SiblingsMapper.map(context, new Siblings(dataMap)))); + mappingHelper.mapToResult( + UPSTREAM_LINEAGE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setFineGrainedLineages( + UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + ACCESS_DATASET_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + 
((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dataset.class); + } else { + return mappingHelper.getResult(); } + } - private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final DatasetKey gmsKey = new DatasetKey(dataMap); - dataset.setName(gmsKey.getName()); - dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() + private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final DatasetKey gmsKey = new DatasetKey(dataMap); + dataset.setName(gmsKey.getName()); + dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); + dataset.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); - } + .setUrn(gmsKey.getPlatform().toString()) + .build()); + } - private void mapDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final DatasetProperties gmsProperties = new DatasetProperties(dataMap); - final com.linkedin.datahub.graphql.generated.DatasetProperties properties = - new com.linkedin.datahub.graphql.generated.DatasetProperties(); - properties.setDescription(gmsProperties.getDescription()); - dataset.setDescription(gmsProperties.getDescription()); - properties.setOrigin(dataset.getOrigin()); - if (gmsProperties.getExternalUrl() != null) { - 
properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); - } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); - if (gmsProperties.getName() != null) { - properties.setName(gmsProperties.getName()); - } else { - properties.setName(dataset.getName()); - } - properties.setQualifiedName(gmsProperties.getQualifiedName()); - dataset.setProperties(properties); - dataset.setDescription(properties.getDescription()); - if (gmsProperties.getUri() != null) { - dataset.setUri(gmsProperties.getUri().toString()); - } - TimeStamp created = gmsProperties.getCreated(); - if (created != null) { - properties.setCreated(created.getTime()); - if (created.hasActor()) { - properties.setCreatedActor(created.getActor().toString()); - } - } - TimeStamp lastModified = gmsProperties.getLastModified(); - if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } - } + private void mapDatasetProperties( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final DatasetProperties gmsProperties = new DatasetProperties(dataMap); + final com.linkedin.datahub.graphql.generated.DatasetProperties properties = + new com.linkedin.datahub.graphql.generated.DatasetProperties(); + properties.setDescription(gmsProperties.getDescription()); + dataset.setDescription(gmsProperties.getDescription()); + properties.setOrigin(dataset.getOrigin()); + if (gmsProperties.getExternalUrl() != null) { + properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - - private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); - final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); - 
editableProperties.setDescription(editableDatasetProperties.getDescription()); - dataset.setEditableProperties(editableProperties); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + if (gmsProperties.getName() != null) { + properties.setName(gmsProperties.getName()); + } else { + properties.setName(dataset.getName()); } - - private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final ViewProperties properties = new ViewProperties(dataMap); - final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = - new com.linkedin.datahub.graphql.generated.ViewProperties(); - graphqlProperties.setMaterialized(properties.isMaterialized()); - graphqlProperties.setLanguage(properties.getViewLanguage()); - graphqlProperties.setLogic(properties.getViewLogic()); - dataset.setViewProperties(graphqlProperties); + properties.setQualifiedName(gmsProperties.getQualifiedName()); + dataset.setProperties(properties); + dataset.setDescription(properties.getDescription()); + dataset.setName(properties.getName()); + if (gmsProperties.getUri() != null) { + dataset.setUri(gmsProperties.getUri().toString()); } + TimeStamp created = gmsProperties.getCreated(); + if (created != null) { + properties.setCreated(created.getTime()); + if (created.hasActor()) { + properties.setCreatedActor(created.getActor().toString()); + } + } + TimeStamp lastModified = gmsProperties.getLastModified(); + if (lastModified != null) { + Urn actor = lastModified.getActor(); + properties.setLastModified( + new AuditStamp(lastModified.getTime(), actor == null ? null : actor.toString())); + properties.setLastModifiedActor(actor == null ? 
null : actor.toString()); + } else { + properties.setLastModified(new AuditStamp(0L, null)); + } + } - private void mapGlobalTags(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataset.setGlobalTags(globalTags); - dataset.setTags(globalTags); + private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); + final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); + editableProperties.setDescription(editableDatasetProperties.getDescription()); + if (editableDatasetProperties.getName() != null) { + editableProperties.setName(editableDatasetProperties.getName()); } + dataset.setEditableProperties(editableProperties); + } + + private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final ViewProperties properties = new ViewProperties(dataMap); + final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = + new com.linkedin.datahub.graphql.generated.ViewProperties(); + graphqlProperties.setMaterialized(properties.isMaterialized()); + graphqlProperties.setLanguage(properties.getViewLanguage()); + graphqlProperties.setLogic(properties.getViewLogic()); + graphqlProperties.setFormattedLogic(properties.getFormattedViewLogic()); + dataset.setViewProperties(graphqlProperties); + } - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Dataset dataset, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + 
com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); + dataset.setGlobalTags(globalTags); + dataset.setTags(globalTags); + } + + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); - } + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dataset.setDomain(DomainAssociationMapper.map(context, domains, dataset.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index dbaaf27a3f2bc9..e966993871d067 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.dataset.DatasetFieldProfile; import com.linkedin.dataset.DatasetProfile; @@ -7,21 +8,24 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import 
javax.annotation.Nonnull; +import javax.annotation.Nullable; - -public class DatasetProfileMapper implements TimeSeriesAspectMapper { +public class DatasetProfileMapper + implements TimeSeriesAspectMapper { public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); - public static com.linkedin.datahub.graphql.generated.DatasetProfile map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + public static com.linkedin.datahub.graphql.generated.DatasetProfile map( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override - public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull final EnvelopedAspect envelopedAspect) { + public com.linkedin.datahub.graphql.generated.DatasetProfile apply( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { - DatasetProfile gmsProfile = GenericRecordUtils - .deserializeAspect( + DatasetProfile gmsProfile = + GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), envelopedAspect.getAspect().getContentType(), DatasetProfile.class); @@ -35,13 +39,16 @@ public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull fina result.setTimestampMillis(gmsProfile.getTimestampMillis()); if (gmsProfile.hasFieldProfiles()) { result.setFieldProfiles( - gmsProfile.getFieldProfiles().stream().map(DatasetProfileMapper::mapFieldProfile).collect(Collectors.toList())); + gmsProfile.getFieldProfiles().stream() + .map(DatasetProfileMapper::mapFieldProfile) + .collect(Collectors.toList())); } return result; } - private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile(DatasetFieldProfile gmsProfile) { + private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile( + DatasetFieldProfile gmsProfile) { final com.linkedin.datahub.graphql.generated.DatasetFieldProfile 
result = new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(); result.setFieldPath(gmsProfile.getFieldPath()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 78c1299ed9bd9a..104dc0e1043413 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; @@ -21,22 +24,23 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetUpdateInputMapper implements InputModelMapper, Urn> { +public class DatasetUpdateInputMapper + implements InputModelMapper, Urn> { public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection map( + @Nullable final QueryContext context, @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(datasetUpdateInput, actor); + return INSTANCE.apply(context, datasetUpdateInput, actor); } 
@Override public Collection apply( + @Nullable final QueryContext context, @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(6); @@ -46,8 +50,10 @@ public Collection apply( auditStamp.setTime(System.currentTimeMillis()); if (datasetUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(context, datasetUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (datasetUpdateInput.getDeprecation() != null) { @@ -58,29 +64,33 @@ public Collection apply( } deprecation.setNote(datasetUpdateInput.getDeprecation().getNote()); deprecation.setActor(actor, SetMode.IGNORE_NULL); - proposals.add(updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); } if (datasetUpdateInput.getInstitutionalMemory() != null) { - proposals.add(updateMappingHelper.aspectToProposal(InstitutionalMemoryUpdateMapper - .map(datasetUpdateInput.getInstitutionalMemory()), INSTITUTIONAL_MEMORY_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + InstitutionalMemoryUpdateMapper.map( + context, datasetUpdateInput.getInstitutionalMemory()), + INSTITUTIONAL_MEMORY_ASPECT_NAME)); } if (datasetUpdateInput.getTags() != null || datasetUpdateInput.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); if (datasetUpdateInput.getGlobalTags() != null) { - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getGlobalTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + 
datasetUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } @@ -89,28 +99,38 @@ public Collection apply( final EditableSchemaMetadata editableSchemaMetadata = new EditableSchemaMetadata(); editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( - datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream().map( - element -> mapSchemaFieldInfo(element) - ).collect(Collectors.toList()))); + datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() + .map(element -> mapSchemaFieldInfo(context, element)) + .collect(Collectors.toList()))); editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); } if (datasetUpdateInput.getEditableProperties() != null) { final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(); - editableDatasetProperties.setDescription(datasetUpdateInput.getEditableProperties().getDescription()); + if (datasetUpdateInput.getEditableProperties().getDescription() != null) { + editableDatasetProperties.setDescription( + 
datasetUpdateInput.getEditableProperties().getDescription()); + } + if (datasetUpdateInput.getEditableProperties().getName() != null) { + editableDatasetProperties.setName(datasetUpdateInput.getEditableProperties().getName()); + } editableDatasetProperties.setLastModified(auditStamp); editableDatasetProperties.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); } return proposals; } private EditableSchemaFieldInfo mapSchemaFieldInfo( - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo - ) { + @Nullable QueryContext context, + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); if (schemaFieldInfo.getDescription() != null) { @@ -120,11 +140,14 @@ private EditableSchemaFieldInfo mapSchemaFieldInfo( if (schemaFieldInfo.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(schemaFieldInfo.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element)).collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + schemaFieldInfo.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(context, element)) + .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } return output; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index 922574d5051d30..15ba9d025ec85b 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -1,42 +1,44 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; - import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class EditableSchemaFieldInfoMapper { - public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); + public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); - public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final EditableSchemaFieldInfo fieldInfo, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(fieldInfo, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( + @Nullable final QueryContext context, + @Nonnull final EditableSchemaFieldInfo fieldInfo, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, fieldInfo, entityUrn); + } - public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); - if (input.hasDescription()) { - result.setDescription((input.getDescription())); - } - if (input.hasFieldPath()) { - result.setFieldPath((input.getFieldPath())); - } - if (input.hasGlobalTags()) { - 
result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - return result; + public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( + @Nullable final QueryContext context, + @Nonnull final EditableSchemaFieldInfo input, + @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = + new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); + if (input.hasDescription()) { + result.setDescription((input.getDescription())); + } + if (input.hasFieldPath()) { + result.setFieldPath((input.getFieldPath())); + } + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, input.getGlossaryTerms(), entityUrn)); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 376558d2fd18cb..1c1e77f66a1ece 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,28 +1,33 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; -import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.common.urn.Urn; - -import javax.annotation.Nonnull; +import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EditableSchemaMetadataMapper { - public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - - public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(metadata, entityUrn); - } + public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply(@Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); - result.setEditableSchemaFieldInfo(input.getEditableSchemaFieldInfo().stream().map(schemaField -> - EditableSchemaFieldInfoMapper.map(schemaField, entityUrn) - ).collect(Collectors.toList())); - return result; - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( + @Nullable QueryContext context, + @Nonnull final EditableSchemaMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); + } + public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( + @Nullable QueryContext context, + @Nonnull final EditableSchemaMetadata input, + @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = + new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); + result.setEditableSchemaFieldInfo( + input.getEditableSchemaFieldInfo().stream() + .map(schemaField -> EditableSchemaFieldInfoMapper.map(context, schemaField, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b76767fa5d0454..56ec8de758857a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -1,45 +1,48 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import lombok.extern.slf4j.Slf4j; - import java.util.stream.Collectors; - +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { - private ForeignKeyConstraintMapper() { } + private ForeignKeyConstraintMapper() {} - public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { + public static ForeignKeyConstraint map( + @Nullable QueryContext context, com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); result.setName(constraint.getName()); if (constraint.hasForeignDataset()) { - result.setForeignDataset((Dataset) UrnToEntityMapper.map(constraint.getForeignDataset())); + result.setForeignDataset( + (Dataset) UrnToEntityMapper.map(context, constraint.getForeignDataset())); } if (constraint.hasSourceFields()) { result.setSourceFields( - constraint.getSourceFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + 
constraint.getSourceFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(context, schemaFieldUrn)) + .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( - constraint.getForeignFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getForeignFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(context, schemaFieldUrn)) + .collect(Collectors.toList())); } return result; } - private static SchemaFieldEntity mapSchemaFieldEntity(Urn schemaFieldUrn) { + private static SchemaFieldEntity mapSchemaFieldEntity( + @Nullable QueryContext context, Urn schemaFieldUrn) { SchemaFieldEntity result = new SchemaFieldEntity(); try { Urn resourceUrn = Urn.createFromString(schemaFieldUrn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(resourceUrn)); + result.setParent(UrnToEntityMapper.map(context, resourceUrn)); } catch (Exception e) { throw new RuntimeException("Error converting schemaField parent urn string to Urn", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index 515cba5e99c74e..28096f30d1817d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -1,67 +1,74 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.KeyValueSchema; import com.linkedin.datahub.graphql.generated.PlatformSchema; import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import 
com.linkedin.schema.SchemaMetadata; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class PlatformSchemaMapper implements ModelMapper { +public class PlatformSchemaMapper + implements ModelMapper { - public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); + public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); - } + public static PlatformSchema map( + @Nullable QueryContext context, @Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(context, metadata); + } - @Override - public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { - Object result; - if (input.isSchemaless()) { - return null; - } else if (input.isPrestoDDL()) { - final TableSchema prestoSchema = new TableSchema(); - prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); - result = prestoSchema; - } else if (input.isOracleDDL()) { - final TableSchema oracleSchema = new TableSchema(); - oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); - result = oracleSchema; - } else if (input.isMySqlDDL()) { - final TableSchema mySqlSchema = new TableSchema(); - mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); - result = mySqlSchema; - } else if (input.isKafkaSchema()) { - final TableSchema kafkaSchema = new TableSchema(); - kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); - result = kafkaSchema; - } else if (input.isOrcSchema()) { - final TableSchema orcSchema = new TableSchema(); - orcSchema.setSchema(input.getOrcSchema().getSchema()); - result = orcSchema; - } else if (input.isBinaryJsonSchema()) { - final TableSchema binaryJsonSchema = new TableSchema(); - binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); - result = binaryJsonSchema; - } else if (input.isEspressoSchema()) { - final 
KeyValueSchema espressoSchema = new KeyValueSchema(); - espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); - espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); - result = espressoSchema; - } else if (input.isKeyValueSchema()) { - final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); - otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); - otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); - result = otherKeyValueSchema; - } else if (input.isOtherSchema()) { - final TableSchema otherTableSchema = new TableSchema(); - otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); - result = otherTableSchema; - } else { - throw new RuntimeException(String.format("Unrecognized platform schema type %s provided", input.memberType().getType().name())); - } - return (PlatformSchema) result; + @Override + public PlatformSchema apply( + @Nullable QueryContext context, @Nonnull final SchemaMetadata.PlatformSchema input) { + Object result; + if (input.isSchemaless()) { + return null; + } else if (input.isPrestoDDL()) { + final TableSchema prestoSchema = new TableSchema(); + prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); + result = prestoSchema; + } else if (input.isOracleDDL()) { + final TableSchema oracleSchema = new TableSchema(); + oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); + result = oracleSchema; + } else if (input.isMySqlDDL()) { + final TableSchema mySqlSchema = new TableSchema(); + mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); + result = mySqlSchema; + } else if (input.isKafkaSchema()) { + final TableSchema kafkaSchema = new TableSchema(); + kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); + result = kafkaSchema; + } else if (input.isOrcSchema()) { + final TableSchema orcSchema = new TableSchema(); + orcSchema.setSchema(input.getOrcSchema().getSchema()); + result = orcSchema; + } else if 
(input.isBinaryJsonSchema()) { + final TableSchema binaryJsonSchema = new TableSchema(); + binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); + result = binaryJsonSchema; + } else if (input.isEspressoSchema()) { + final KeyValueSchema espressoSchema = new KeyValueSchema(); + espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); + espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); + result = espressoSchema; + } else if (input.isKeyValueSchema()) { + final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); + otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); + otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); + result = otherKeyValueSchema; + } else if (input.isOtherSchema()) { + final TableSchema otherTableSchema = new TableSchema(); + otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); + result = otherTableSchema; + } else { + throw new RuntimeException( + String.format( + "Unrecognized platform schema type %s provided", + input.memberType().getType().name())); } + return (PlatformSchema) result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index f05a1adb6b443f..3674186ac23fe6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -1,74 +1,99 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaField; import 
com.linkedin.datahub.graphql.generated.SchemaFieldDataType; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; - +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.metadata.utils.SchemaFieldUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SchemaFieldMapper { - public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); - public static SchemaField map(@Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static SchemaField map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.schema.SchemaField metadata, + @Nonnull Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); + } - public SchemaField apply(@Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { - final SchemaField result = new SchemaField(); - result.setDescription(input.getDescription()); - result.setFieldPath(input.getFieldPath()); - result.setJsonPath(input.getJsonPath()); - result.setRecursive(input.isRecursive()); - result.setNullable(input.isNullable()); - result.setNativeDataType(input.getNativeDataType()); - result.setType(mapSchemaFieldDataType(input.getType())); - result.setLabel(input.getLabel()); - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - result.setIsPartOfKey(input.isIsPartOfKey()); - result.setIsPartitioningKey(input.isIsPartitioningKey()); - 
return result; + public SchemaField apply( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.schema.SchemaField input, + @Nonnull Urn entityUrn) { + final SchemaField result = new SchemaField(); + result.setDescription(input.getDescription()); + result.setFieldPath(input.getFieldPath()); + result.setJsonPath(input.getJsonPath()); + result.setRecursive(input.isRecursive()); + result.setNullable(input.isNullable()); + result.setNativeDataType(input.getNativeDataType()); + result.setType(mapSchemaFieldDataType(input.getType())); + result.setLabel(input.getLabel()); + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, input.getGlossaryTerms(), entityUrn)); } + result.setIsPartOfKey(input.isIsPartOfKey()); + result.setIsPartitioningKey(input.isIsPartitioningKey()); + result.setJsonProps(input.getJsonProps()); + result.setSchemaFieldEntity(this.createSchemaFieldEntity(input, entityUrn)); + return result; + } - private SchemaFieldDataType mapSchemaFieldDataType(@Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { - final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); - if (type.isBytesType()) { - return SchemaFieldDataType.BYTES; - } else if (type.isFixedType()) { - return SchemaFieldDataType.FIXED; - } else if (type.isBooleanType()) { - return SchemaFieldDataType.BOOLEAN; - } else if (type.isStringType()) { - return SchemaFieldDataType.STRING; - } else if (type.isNumberType()) { - return SchemaFieldDataType.NUMBER; - } else if (type.isDateType()) { - return SchemaFieldDataType.DATE; - } else if (type.isTimeType()) { - return SchemaFieldDataType.TIME; - } else if (type.isEnumType()) { - return SchemaFieldDataType.ENUM; - } else if (type.isNullType()) { - 
return SchemaFieldDataType.NULL; - } else if (type.isArrayType()) { - return SchemaFieldDataType.ARRAY; - } else if (type.isMapType()) { - return SchemaFieldDataType.MAP; - } else if (type.isRecordType()) { - return SchemaFieldDataType.STRUCT; - } else if (type.isUnionType()) { - return SchemaFieldDataType.UNION; - } else { - throw new RuntimeException(String.format("Unrecognized SchemaFieldDataType provided %s", - type.memberType().toString())); - } + public SchemaFieldDataType mapSchemaFieldDataType( + @Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { + final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); + if (type.isBytesType()) { + return SchemaFieldDataType.BYTES; + } else if (type.isFixedType()) { + return SchemaFieldDataType.FIXED; + } else if (type.isBooleanType()) { + return SchemaFieldDataType.BOOLEAN; + } else if (type.isStringType()) { + return SchemaFieldDataType.STRING; + } else if (type.isNumberType()) { + return SchemaFieldDataType.NUMBER; + } else if (type.isDateType()) { + return SchemaFieldDataType.DATE; + } else if (type.isTimeType()) { + return SchemaFieldDataType.TIME; + } else if (type.isEnumType()) { + return SchemaFieldDataType.ENUM; + } else if (type.isNullType()) { + return SchemaFieldDataType.NULL; + } else if (type.isArrayType()) { + return SchemaFieldDataType.ARRAY; + } else if (type.isMapType()) { + return SchemaFieldDataType.MAP; + } else if (type.isRecordType()) { + return SchemaFieldDataType.STRUCT; + } else if (type.isUnionType()) { + return SchemaFieldDataType.UNION; + } else { + throw new RuntimeException( + String.format( + "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } + } + + private SchemaFieldEntity createSchemaFieldEntity( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + SchemaFieldEntity schemaFieldEntity = new SchemaFieldEntity(); + schemaFieldEntity.setUrn( + 
SchemaFieldUtils.generateSchemaFieldUrn(entityUrn.toString(), input.getFieldPath()) + .toString()); + schemaFieldEntity.setType(EntityType.SCHEMA_FIELD); + return schemaFieldEntity; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index eb793cc17efb6b..fd089184fb1c4b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -1,47 +1,62 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; - +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.stream.Collectors; public class SchemaMapper { - public static final SchemaMapper INSTANCE = new SchemaMapper(); + public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); - } + public static Schema map( + @Nullable QueryContext context, + @Nonnull final SchemaMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, null, entityUrn); + } - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); - } + public static Schema map( + @Nullable QueryContext context, + @Nonnull final SchemaMetadata metadata, + @Nullable final SystemMetadata 
systemMetadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, systemMetadata, entityUrn); + } - public Schema apply(@Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - final Schema result = new Schema(); - if (input.getDataset() != null) { - result.setDatasetUrn(input.getDataset().toString()); - } - if (systemMetadata != null) { - result.setLastObserved(systemMetadata.getLastObserved()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - if (input.getForeignKeys() != null) { - result.setForeignKeys(input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) - .collect(Collectors.toList())); - } - return result; + public Schema apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.schema.SchemaMetadata input, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + final Schema result = new Schema(); + if (input.getDataset() != null) { + result.setDatasetUrn(input.getDataset().toString()); + } + if (systemMetadata != null) { + result.setLastObserved(systemMetadata.getLastObserved()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(context, field, entityUrn)) + 
.collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(context, input.getPlatformSchema())); + if (input.getForeignKeys() != null) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(fk -> ForeignKeyConstraintMapper.map(context, fk)) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 00cb91bed8abb2..327cae3bae11f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -1,48 +1,63 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.schema.SchemaMetadata; import java.util.stream.Collectors; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class SchemaMetadataMapper { - public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); + public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); - public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(aspect, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, aspect, entityUrn); + } - public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn 
- ) { - final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); - final com.linkedin.datahub.graphql.generated.SchemaMetadata result = - new com.linkedin.datahub.graphql.generated.SchemaMetadata(); + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + return apply(context, input, entityUrn, aspect.getVersion()); + } - if (input.hasDataset()) { - result.setDatasetUrn(input.getDataset().toString()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); - if (input.hasForeignKeys()) { - result.setForeignKeys(input.getForeignKeys().stream().map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map( - foreignKeyConstraint - )).collect(Collectors.toList())); - } - return result; + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nullable QueryContext context, + @Nonnull final SchemaMetadata input, + final Urn entityUrn, + final long version) { + final com.linkedin.datahub.graphql.generated.SchemaMetadata result = + new com.linkedin.datahub.graphql.generated.SchemaMetadata(); + + if (input.hasDataset()) { + result.setDatasetUrn(input.getDataset().toString()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + 
result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(context, field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(context, input.getPlatformSchema())); + result.setAspectVersion(version); + if (input.hasForeignKeys()) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map( + foreignKeyConstraint -> + ForeignKeyConstraintMapper.map(context, foreignKeyConstraint)) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 241c4872b1caa4..817c7c983ecc5c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -8,17 +11,19 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DatasetEditableProperties; import com.linkedin.datahub.graphql.generated.EntityType; import 
com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.generated.VersionedDataset; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -36,27 +41,27 @@ import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ @Slf4j public class VersionedDatasetMapper implements ModelMapper { public static final VersionedDatasetMapper INSTANCE = new VersionedDatasetMapper(); - public static VersionedDataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); + public static VersionedDataset map( + @Nullable final QueryContext context, @Nonnull final EntityResponse dataset) { + return INSTANCE.apply(context, dataset); } @Override - public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { + public VersionedDataset apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { VersionedDataset result = new VersionedDataset(); Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityResponse.getUrn().toString()); @@ -67,30 +72,63 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { SystemMetadata schemaSystemMetadata = getSystemMetadata(aspectMap, SCHEMA_METADATA_ASPECT_NAME); mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation( + 
DatasetDeprecationMapper.map(context, new DatasetDeprecation(dataMap)))); + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSchema( + SchemaMapper.map( + context, new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - - return mappingHelper.getResult(); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + 
mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map( + context, new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, CONTAINER_ASPECT_NAME, VersionedDatasetMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, VersionedDatasetMapper::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), VersionedDataset.class); + } else { + return mappingHelper.getResult(); + } } private SystemMetadata getSystemMetadata(EnvelopedAspectMap aspectMap, String aspectName) { @@ -104,12 +142,15 @@ private void mapDatasetKey(@Nonnull VersionedDataset dataset, @Nonnull DataMap d final DatasetKey gmsKey = new DatasetKey(dataMap); dataset.setName(gmsKey.getName()); dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + dataset.setPlatform( + DataPlatform.builder() + 
.setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); } - private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { + private void mapDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { final DatasetProperties gmsProperties = new DatasetProperties(dataMap); final com.linkedin.datahub.graphql.generated.DatasetProperties properties = new com.linkedin.datahub.graphql.generated.DatasetProperties(); @@ -118,7 +159,8 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da if (gmsProperties.getExternalUrl() != null) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); if (gmsProperties.getName() != null) { properties.setName(gmsProperties.getName()); } else { @@ -128,8 +170,10 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da dataset.setProperties(properties); } - private void mapEditableDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); + private void mapEditableDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); editableProperties.setDescription(editableDatasetProperties.getDescription()); dataset.setEditableProperties(editableProperties); @@ -145,23 +189,35 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } 
- private void mapGlobalTags(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataset.setTags(globalTags); } - private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + private static void mapContainers( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } - private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + dataset.setDomain(DomainAssociationMapper.map(context, domains, dataset.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java new file mode 100644 index 00000000000000..b2e3b2c7447d81 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.DataTypeInfo; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class DataTypeEntityMapper implements ModelMapper { + + public static final DataTypeEntityMapper INSTANCE = new DataTypeEntityMapper(); + + public static DataTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public DataTypeEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final DataTypeEntity result = new DataTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_TYPE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new 
MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_TYPE_INFO_ASPECT_NAME, this::mapDataTypeInfo); + + // Set the standard Type ENUM for the data type. + if (result.getInfo() != null) { + result.getInfo().setType(DataTypeUrnMapper.getType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapDataTypeInfo(@Nonnull DataTypeEntity dataType, @Nonnull DataMap dataMap) { + com.linkedin.datatype.DataTypeInfo gmsInfo = new com.linkedin.datatype.DataTypeInfo(dataMap); + DataTypeInfo info = new DataTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + dataType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java new file mode 100644 index 00000000000000..778af0a7cb5068 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import static com.linkedin.metadata.Constants.DATA_TYPE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; 
+import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class DataTypeType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATA_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.DATA_TYPE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataTypeEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + DATA_TYPE_ENTITY_NAME, + new HashSet<>(dataTypeUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataTypeEntityMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load data type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java new file mode 100644 index 00000000000000..ec71cd63a70d5e --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeUrnMapper.java @@ -0,0 +1,40 @@ +package com.linkedin.datahub.graphql.types.datatype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.StdDataType; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class DataTypeUrnMapper { + + static final Map DATA_TYPE_ENUM_TO_URN = + ImmutableMap.builder() + .put(StdDataType.STRING, "urn:li:dataType:datahub.string") + .put(StdDataType.NUMBER, "urn:li:dataType:datahub.number") + .put(StdDataType.URN, "urn:li:dataType:datahub.urn") + .put(StdDataType.RICH_TEXT, "urn:li:dataType:datahub.rich_text") + .put(StdDataType.DATE, "urn:li:dataType:datahub.date") + .build(); + + private static final Map URN_TO_DATA_TYPE_ENUM = + DATA_TYPE_ENUM_TO_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private DataTypeUrnMapper() {} + + public static StdDataType getType(String dataTypeUrn) { + if (!URN_TO_DATA_TYPE_ENUM.containsKey(dataTypeUrn)) { + return StdDataType.OTHER; + } + return URN_TO_DATA_TYPE_ENUM.get(dataTypeUrn); + } + + @Nonnull + public static String getUrn(StdDataType dataType) { + if (!DATA_TYPE_ENUM_TO_URN.containsKey(dataType)) { + throw new IllegalArgumentException("Unknown data type: " + dataType); + } + return DATA_TYPE_ENUM_TO_URN.get(dataType); + } +} 
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index df8de87ff69ff1..37b2018a2d4506 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -1,36 +1,46 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainAssociation; import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class DomainAssociationMapper { - public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); + public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); - public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, - @Nonnull final String entityUrn - ) { - return INSTANCE.apply(domains, entityUrn); - } + public static DomainAssociation map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.domain.Domains domains, + @Nonnull final String entityUrn) { + return INSTANCE.apply(context, domains, entityUrn); + } - public DomainAssociation apply(@Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { - DomainAssociation association = new DomainAssociation(); - association.setDomain(Domain.builder() - .setType(EntityType.DOMAIN) - .setUrn(domains.getDomains().get(0).toString()).build()); - association.setAssociatedUrn(entityUrn); - return association; - } - return null; + public DomainAssociation apply( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.domain.Domains domains, + @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0 + && (context == null + || canView(context.getOperationContext(), domains.getDomains().get(0)))) { + DomainAssociation association = new DomainAssociation(); + association.setDomain( + Domain.builder() + .setType(EntityType.DOMAIN) + .setUrn(domains.getDomains().get(0).toString()) + .build()); + association.setAssociatedUrn(entityUrn); + return association; } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index fe52b5eff718fc..7d05e0862a96da 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -1,23 +1,33 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + +import com.linkedin.common.Forms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.domain.DomainProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; - +import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nullable; public class DomainMapper { - public static Domain map(final EntityResponse entityResponse) { + public static Domain map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Domain result = new Domain(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -33,30 +43,58 @@ public static Domain map(final EntityResponse entityResponse) { return null; } - 
final EnvelopedAspect envelopedDomainProperties = aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedDomainProperties = + aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (envelopedDomainProperties != null) { - result.setProperties(mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); + result.setProperties( + mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map( + context, new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), + entityUrn)); + } + + final EnvelopedAspect envelopedStructuredProps = aspects.get(STRUCTURED_PROPERTIES_ASPECT_NAME); + if (envelopedStructuredProps != null) { + result.setStructuredProperties( + StructuredPropertiesMapper.map( + context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); } - return result; + final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); + if (envelopedForms != null) { + result.setForms( + FormsMapper.map(new Forms(envelopedForms.getValue().data()), entityUrn.toString())); + } + + if (context != null && 
!canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, Domain.class); + } else { + return result; + } } - private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties(final DomainProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = new com.linkedin.datahub.graphql.generated.DomainProperties(); + private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( + final DomainProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.DomainProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); return propertiesResult; } - private DomainMapper() { } + private DomainMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index 4879c339d99faa..7afe1018004e02 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -17,8 +17,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashSet; @@ -29,19 +27,23 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; - -public class DomainType implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - - static final Set 
ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME - ); +public class DomainType + implements SearchableEntityType, + com.linkedin.datahub.graphql.types.EntityType { + + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STRUCTURED_PROPERTIES_ASPECT_NAME, + Constants.FORMS_ASPECT_NAME); private final EntityClient _entityClient; - public DomainType(final EntityClient entityClient) { + public DomainType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -61,28 +63,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List domainUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List domainUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.DOMAIN_ENTITY_NAME, - new HashSet<>(domainUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.DOMAIN_ENTITY_NAME, + new HashSet<>(domainUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : domainUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(DomainMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DomainMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Domains", e); @@ -90,25 +94,31 @@ public List> batchLoad(@Nonnull List urns, @No } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Domain entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Domain entity type"); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), Constants.DOMAIN_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } - private Urn getUrn(final String urnStr) { try { return Urn.createFromString(urnStr); @@ -116,4 +126,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java new file mode 100644 index 00000000000000..8afdd3f60e2205 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java @@ -0,0 +1,58 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityTypeInfo; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class EntityTypeEntityMapper implements ModelMapper { + + public static final EntityTypeEntityMapper INSTANCE = new EntityTypeEntityMapper(); + + public static EntityTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public EntityTypeEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + final EntityTypeEntity result = new EntityTypeEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.ENTITY_TYPE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ENTITY_TYPE_INFO_ASPECT_NAME, this::mapEntityTypeInfo); + + // Set the standard Type ENUM for the entity type. 
+ if (result.getInfo() != null) { + result + .getInfo() + .setType(EntityTypeUrnMapper.getEntityType(entityResponse.getUrn().toString())); + } + return mappingHelper.getResult(); + } + + private void mapEntityTypeInfo(@Nonnull EntityTypeEntity entityType, @Nonnull DataMap dataMap) { + com.linkedin.entitytype.EntityTypeInfo gmsInfo = + new com.linkedin.entitytype.EntityTypeInfo(dataMap); + EntityTypeInfo info = new EntityTypeInfo(); + info.setQualifiedName(gmsInfo.getQualifiedName()); + if (gmsInfo.getDisplayName() != null) { + info.setDisplayName(gmsInfo.getDisplayName()); + } + if (gmsInfo.getDescription() != null) { + info.setDescription(gmsInfo.getDescription()); + } + entityType.setInfo(info); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java new file mode 100644 index 00000000000000..77457a814bd677 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeMapper.java @@ -0,0 +1,84 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +/** + * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service + * Storage Entities + */ +public class EntityTypeMapper { + + static final Map ENTITY_TYPE_TO_NAME = + ImmutableMap.builder() + .put(EntityType.DOMAIN, Constants.DOMAIN_ENTITY_NAME) + .put(EntityType.DATASET, Constants.DATASET_ENTITY_NAME) + .put(EntityType.CORP_USER, Constants.CORP_USER_ENTITY_NAME) + .put(EntityType.CORP_GROUP, Constants.CORP_GROUP_ENTITY_NAME) + .put(EntityType.DATA_PLATFORM, Constants.DATA_PLATFORM_ENTITY_NAME) + 
.put(EntityType.ER_MODEL_RELATIONSHIP, Constants.ER_MODEL_RELATIONSHIP_ENTITY_NAME) + .put(EntityType.DASHBOARD, Constants.DASHBOARD_ENTITY_NAME) + .put(EntityType.NOTEBOOK, Constants.NOTEBOOK_ENTITY_NAME) + .put(EntityType.CHART, Constants.CHART_ENTITY_NAME) + .put(EntityType.DATA_FLOW, Constants.DATA_FLOW_ENTITY_NAME) + .put(EntityType.DATA_JOB, Constants.DATA_JOB_ENTITY_NAME) + .put(EntityType.TAG, Constants.TAG_ENTITY_NAME) + .put(EntityType.GLOSSARY_TERM, Constants.GLOSSARY_TERM_ENTITY_NAME) + .put(EntityType.GLOSSARY_NODE, Constants.GLOSSARY_NODE_ENTITY_NAME) + .put(EntityType.CONTAINER, Constants.CONTAINER_ENTITY_NAME) + .put(EntityType.MLMODEL, Constants.ML_MODEL_ENTITY_NAME) + .put(EntityType.MLMODEL_GROUP, Constants.ML_MODEL_GROUP_ENTITY_NAME) + .put(EntityType.MLFEATURE_TABLE, Constants.ML_FEATURE_TABLE_ENTITY_NAME) + .put(EntityType.MLFEATURE, Constants.ML_FEATURE_ENTITY_NAME) + .put(EntityType.MLPRIMARY_KEY, Constants.ML_PRIMARY_KEY_ENTITY_NAME) + .put(EntityType.INGESTION_SOURCE, Constants.INGESTION_SOURCE_ENTITY_NAME) + .put(EntityType.EXECUTION_REQUEST, Constants.EXECUTION_REQUEST_ENTITY_NAME) + .put(EntityType.ASSERTION, Constants.ASSERTION_ENTITY_NAME) + .put(EntityType.DATA_PROCESS_INSTANCE, Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME) + .put(EntityType.DATA_PLATFORM_INSTANCE, Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) + .put(EntityType.ACCESS_TOKEN, Constants.ACCESS_TOKEN_ENTITY_NAME) + .put(EntityType.TEST, Constants.TEST_ENTITY_NAME) + .put(EntityType.DATAHUB_POLICY, Constants.POLICY_ENTITY_NAME) + .put(EntityType.DATAHUB_ROLE, Constants.DATAHUB_ROLE_ENTITY_NAME) + .put(EntityType.POST, Constants.POST_ENTITY_NAME) + .put(EntityType.SCHEMA_FIELD, Constants.SCHEMA_FIELD_ENTITY_NAME) + .put(EntityType.DATAHUB_VIEW, Constants.DATAHUB_VIEW_ENTITY_NAME) + .put(EntityType.QUERY, Constants.QUERY_ENTITY_NAME) + .put(EntityType.DATA_PRODUCT, Constants.DATA_PRODUCT_ENTITY_NAME) + .put(EntityType.CUSTOM_OWNERSHIP_TYPE, 
Constants.OWNERSHIP_TYPE_ENTITY_NAME) + .put(EntityType.INCIDENT, Constants.INCIDENT_ENTITY_NAME) + .put(EntityType.ROLE, Constants.ROLE_ENTITY_NAME) + .put(EntityType.STRUCTURED_PROPERTY, Constants.STRUCTURED_PROPERTY_ENTITY_NAME) + .put(EntityType.FORM, Constants.FORM_ENTITY_NAME) + .put(EntityType.DATA_TYPE, Constants.DATA_TYPE_ENTITY_NAME) + .put(EntityType.ENTITY_TYPE, Constants.ENTITY_TYPE_ENTITY_NAME) + .put(EntityType.RESTRICTED, Constants.RESTRICTED_ENTITY_NAME) + .put(EntityType.BUSINESS_ATTRIBUTE, Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME) + .build(); + + private static final Map ENTITY_NAME_TO_TYPE = + ENTITY_TYPE_TO_NAME.entrySet().stream() + .collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); + + private EntityTypeMapper() {} + + public static EntityType getType(String name) { + String lowercaseName = name.toLowerCase(); + if (!ENTITY_NAME_TO_TYPE.containsKey(lowercaseName)) { + return EntityType.OTHER; + } + return ENTITY_NAME_TO_TYPE.get(lowercaseName); + } + + @Nonnull + public static String getName(EntityType type) { + if (!ENTITY_TYPE_TO_NAME.containsKey(type)) { + throw new IllegalArgumentException("Unknown entity type: " + type); + } + return ENTITY_TYPE_TO_NAME.get(type); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java new file mode 100644 index 00000000000000..f1fc58a010074f --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class EntityTypeType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(ENTITY_TYPE_INFO_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return EntityType.ENTITY_TYPE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return EntityTypeEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List entityTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + ENTITY_TYPE_ENTITY_NAME, + new HashSet<>(entityTypeUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityTypeUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(EntityTypeEntityMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load entity type entities", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java new file mode 100644 index 00000000000000..334faf753cb8b5 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeUrnMapper.java @@ -0,0 +1,114 @@ +package com.linkedin.datahub.graphql.types.entitytype; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.metadata.Constants; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +/** + * In this class we statically map "well-supported" entity types into a more usable Enum class + * served by our GraphQL API. + * + *

When we add new entity types / entity urns, we MAY NEED to update this. + * + *

Note that we currently do not support mapping entities that fall outside of this set. If you + * try to map an entity type without a corresponding enum symbol, the mapping WILL FAIL. + */ +public class EntityTypeUrnMapper { + + static final Map ENTITY_NAME_TO_ENTITY_TYPE_URN = + ImmutableMap.builder() + .put(Constants.DOMAIN_ENTITY_NAME, "urn:li:entityType:datahub.domain") + .put(Constants.DATASET_ENTITY_NAME, "urn:li:entityType:datahub.dataset") + .put(Constants.CORP_USER_ENTITY_NAME, "urn:li:entityType:datahub.corpuser") + .put(Constants.CORP_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.corpGroup") + .put(Constants.DATA_PLATFORM_ENTITY_NAME, "urn:li:entityType:datahub.dataPlatform") + .put( + Constants.ER_MODEL_RELATIONSHIP_ENTITY_NAME, + "urn:li:entityType:datahub.erModelRelationship") + .put(Constants.DASHBOARD_ENTITY_NAME, "urn:li:entityType:datahub.dashboard") + .put(Constants.NOTEBOOK_ENTITY_NAME, "urn:li:entityType:datahub.notebook") + .put(Constants.CHART_ENTITY_NAME, "urn:li:entityType:datahub.chart") + .put(Constants.DATA_FLOW_ENTITY_NAME, "urn:li:entityType:datahub.dataFlow") + .put(Constants.DATA_JOB_ENTITY_NAME, "urn:li:entityType:datahub.dataJob") + .put(Constants.TAG_ENTITY_NAME, "urn:li:entityType:datahub.tag") + .put(Constants.GLOSSARY_TERM_ENTITY_NAME, "urn:li:entityType:datahub.glossaryTerm") + .put(Constants.GLOSSARY_NODE_ENTITY_NAME, "urn:li:entityType:datahub.glossaryNode") + .put(Constants.CONTAINER_ENTITY_NAME, "urn:li:entityType:datahub.container") + .put(Constants.ML_MODEL_ENTITY_NAME, "urn:li:entityType:datahub.mlModel") + .put(Constants.ML_MODEL_GROUP_ENTITY_NAME, "urn:li:entityType:datahub.mlModelGroup") + .put(Constants.ML_FEATURE_TABLE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeatureTable") + .put(Constants.ML_FEATURE_ENTITY_NAME, "urn:li:entityType:datahub.mlFeature") + .put(Constants.ML_PRIMARY_KEY_ENTITY_NAME, "urn:li:entityType:datahub.mlPrimaryKey") + .put( + Constants.INGESTION_SOURCE_ENTITY_NAME, + 
"urn:li:entityType:datahub.dataHubIngestionSource") + .put( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + "urn:li:entityType:datahub.dataHubExecutionRequest") + .put(Constants.ASSERTION_ENTITY_NAME, "urn:li:entityType:datahub.assertion") + .put( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataProcessInstance") + .put( + Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + "urn:li:entityType:datahub.dataPlatformInstance") + .put(Constants.ACCESS_TOKEN_ENTITY_NAME, "urn:li:entityType:datahub.dataHubAccessToken") + .put(Constants.TEST_ENTITY_NAME, "urn:li:entityType:datahub.test") + .put(Constants.POLICY_ENTITY_NAME, "urn:li:entityType:datahub.dataHubPolicy") + .put(Constants.DATAHUB_ROLE_ENTITY_NAME, "urn:li:entityType:datahub.dataHubRole") + .put(Constants.POST_ENTITY_NAME, "urn:li:entityType:datahub.post") + .put(Constants.SCHEMA_FIELD_ENTITY_NAME, "urn:li:entityType:datahub.schemaField") + .put(Constants.DATAHUB_VIEW_ENTITY_NAME, "urn:li:entityType:datahub.dataHubView") + .put(Constants.QUERY_ENTITY_NAME, "urn:li:entityType:datahub.query") + .put(Constants.DATA_PRODUCT_ENTITY_NAME, "urn:li:entityType:datahub.dataProduct") + .put(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "urn:li:entityType:datahub.ownershipType") + .put(Constants.INCIDENT_ENTITY_NAME, "urn:li:entityType:datahub.incident") + .put(Constants.ROLE_ENTITY_NAME, "urn:li:entityType:datahub.role") + .put( + Constants.STRUCTURED_PROPERTY_ENTITY_NAME, + "urn:li:entityType:datahub.structuredProperty") + .put(Constants.FORM_ENTITY_NAME, "urn:li:entityType:datahub.form") + .put(Constants.DATA_TYPE_ENTITY_NAME, "urn:li:entityType:datahub.dataType") + .put(Constants.ENTITY_TYPE_ENTITY_NAME, "urn:li:entityType:datahub.entityType") + .put(Constants.RESTRICTED_ENTITY_NAME, "urn:li:entityType:datahub.restricted") + .put( + Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME, + "urn:li:entityType:datahub.businessAttribute") + .build(); + + private static final Map ENTITY_TYPE_URN_TO_NAME = + 
ENTITY_NAME_TO_ENTITY_TYPE_URN.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getValue, Map.Entry::getKey)); + + private EntityTypeUrnMapper() {} + + public static String getName(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + return ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + } + + /* + * Takes in a entityTypeUrn and returns a GraphQL EntityType by first mapping + * the urn to the entity name, and then mapping the entity name to EntityType. + */ + public static EntityType getEntityType(String entityTypeUrn) { + if (!ENTITY_TYPE_URN_TO_NAME.containsKey(entityTypeUrn)) { + throw new IllegalArgumentException("Unknown entityTypeUrn: " + entityTypeUrn); + } + final String entityName = ENTITY_TYPE_URN_TO_NAME.get(entityTypeUrn); + return EntityTypeMapper.getType(entityName); + } + + @Nonnull + public static String getEntityTypeUrn(String name) { + if (!ENTITY_NAME_TO_ENTITY_TYPE_URN.containsKey(name)) { + throw new IllegalArgumentException("Unknown entity name: " + name); + } + return ENTITY_NAME_TO_ENTITY_TYPE_URN.get(name); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java new file mode 100644 index 00000000000000..cafd0b5ab082b2 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java @@ -0,0 +1,118 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.CorpuserUrn; +import com.linkedin.common.urn.ERModelRelationshipUrn; +import 
com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipPropertiesInput; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationMapper; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationshipUpdateInputMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.service.ERModelRelationshipService; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.nio.charset.StandardCharsets; +import java.util.Collection; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.codec.digest.DigestUtils; + +@Slf4j +@RequiredArgsConstructor +public class CreateERModelRelationshipResolver + implements DataFetcher> { + + private final EntityClient _entityClient; + private final ERModelRelationshipService _erModelRelationshipService; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + final ERModelRelationshipUpdateInput input = + bindArgument(environment.getArgument("input"), ERModelRelationshipUpdateInput.class); + + final ERModelRelationshipPropertiesInput erModelRelationshipPropertiesInput = + input.getProperties(); + String ermodelrelationName = erModelRelationshipPropertiesInput.getName(); + String source = erModelRelationshipPropertiesInput.getSource(); + String destination = erModelRelationshipPropertiesInput.getDestination(); 
+ + String lowDataset = source; + String highDataset = destination; + if (source.compareTo(destination) > 0) { + lowDataset = destination; + highDataset = source; + } + // The following sequence mimics datahub.emitter.mce_builder.datahub_guid + // Keys have to be in alphabetical order - Destination, ERModelRelationName and Source + + String ermodelrelationKey = + "{\"Destination\":\"" + + lowDataset + + "\",\"ERModelRelationName\":\"" + + ermodelrelationName + + "\",\"Source\":\"" + + highDataset + + "\"}"; + + byte[] mybytes = ermodelrelationKey.getBytes(StandardCharsets.UTF_8); + + String ermodelrelationKeyEncoded = new String(mybytes, StandardCharsets.UTF_8); + String ermodelrelationGuid = DigestUtils.md5Hex(ermodelrelationKeyEncoded); + log.info( + "ermodelrelationkey {}, ermodelrelationGuid {}", + ermodelrelationKeyEncoded, + ermodelrelationGuid); + + ERModelRelationshipUrn inputUrn = new ERModelRelationshipUrn(ermodelrelationGuid); + QueryContext context = environment.getContext(); + final Authentication authentication = context.getAuthentication(); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + if (!ERModelRelationshipType.canCreateERModelRelation( + context, + Urn.createFromString(input.getProperties().getSource()), + Urn.createFromString(input.getProperties().getDestination()))) { + throw new AuthorizationException( + "Unauthorized to create erModelRelationship. 
Please contact your DataHub administrator."); + } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug("Create ERModelRelation input: {}", input); + final Collection proposals = + ERModelRelationshipUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to create erModelRelationship entity", e); + } + return ERModelRelationMapper.map( + context, + _erModelRelationshipService.getERModelRelationshipResponse( + context.getOperationContext(), Urn.createFromString(inputUrn.toString()))); + } catch (Exception e) { + log.error( + "Failed to create ERModelRelation to resource with input {}, {}", + input, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create erModelRelationship to resource with input %s", input), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java new file mode 100644 index 00000000000000..fd340aca119b59 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java @@ -0,0 +1,249 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship; + +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.ERModelRelationshipUrn; +import 
com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; +import com.linkedin.datahub.graphql.generated.AutoCompleteResults; +import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; +import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationMapper; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; +import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; +import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; +import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.browse.BrowseResult; +import com.linkedin.metadata.query.AutoCompleteResult; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; 
+import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ERModelRelationshipType + implements com.linkedin.datahub.graphql.types.EntityType, + BrowsableEntityType, + SearchableEntityType { + + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + ER_MODEL_RELATIONSHIP_KEY_ASPECT_NAME, + ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, + EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME); + + private static final Set FACET_FIELDS = ImmutableSet.of("name"); + private static final String ENTITY_NAME = "erModelRelationship"; + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; + + public ERModelRelationshipType(final EntityClient entityClient, final FeatureFlags featureFlags) { + _entityClient = entityClient; + _featureFlags = + featureFlags; // TODO: check if ERModelRelation Feture is Enabled and throw error when + // called + } + + @Override + public Class objectClass() { + return ERModelRelationship.class; + } + + @Override + public EntityType type() { + return EntityType.ER_MODEL_RELATIONSHIP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + @Nonnull final List urns, @Nonnull final QueryContext context) throws Exception { + final List ermodelrelationUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + ER_MODEL_RELATIONSHIP_ENTITY_NAME, + new HashSet<>(ermodelrelationUrns), + ASPECTS_TO_RESOLVE); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : ermodelrelationUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + 
gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(ERModelRelationMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to load erModelRelationship entity", e); + } + } + + @Nonnull + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "erModelRelationship", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Nonnull + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(context.getOperationContext(), UrnUtils.getUrn(urn)); + return BrowsePathsMapper.map(context, result); + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + ENTITY_NAME, + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + 
_entityClient.autoComplete( + context.getOperationContext(), ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + public static boolean canUpdateERModelRelation( + @Nonnull QueryContext context, + ERModelRelationshipUrn resourceUrn, + ERModelRelationshipUpdateInput updateInput) { + final ConjunctivePrivilegeGroup editPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(editPrivilegesGroup, specificPrivilegeGroup)); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + resourceUrn.getEntityType(), + resourceUrn.toString(), + orPrivilegeGroups); + } + + public static boolean canCreateERModelRelation( + @Nonnull QueryContext context, Urn sourceUrn, Urn destinationUrn) { + final ConjunctivePrivilegeGroup editPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + final ConjunctivePrivilegeGroup createPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_ER_MODEL_RELATIONSHIP_PRIVILEGE.getType())); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(editPrivilegesGroup, createPrivilegesGroup)); + boolean sourcePrivilege = + AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + sourceUrn.getEntityType(), + sourceUrn.toString(), + orPrivilegeGroups); + boolean destinationPrivilege = + AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + destinationUrn.getEntityType(), + destinationUrn.toString(), + orPrivilegeGroups); + return sourcePrivilege && destinationPrivilege; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java new file mode 100644 index 00000000000000..5413352a394b49 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java @@ -0,0 +1,69 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + +import com.linkedin.common.urn.CorpuserUrn; +import com.linkedin.common.urn.ERModelRelationshipUrn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.concurrency.GraphQLConcurrencyUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.types.ermodelrelationship.mappers.ERModelRelationshipUpdateInputMapper; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collection; +import 
java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +@RequiredArgsConstructor +public class UpdateERModelRelationshipResolver implements DataFetcher> { + + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final ERModelRelationshipUpdateInput input = + bindArgument(environment.getArgument("input"), ERModelRelationshipUpdateInput.class); + final String urn = bindArgument(environment.getArgument("urn"), String.class); + ERModelRelationshipUrn inputUrn = ERModelRelationshipUrn.createFromString(urn); + QueryContext context = environment.getContext(); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + if (!ERModelRelationshipType.canUpdateERModelRelation(context, inputUrn, input)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + return GraphQLConcurrencyUtils.supplyAsync( + () -> { + try { + log.debug("Create ERModelRelation input: {}", input); + final Collection proposals = + ERModelRelationshipUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); + + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException( + String.format("Failed to update erModelRelationship entity"), e); + } + return true; + } catch (Exception e) { + log.error( + "Failed to update erModelRelationship to resource with input {}, {}", + input, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to update erModelRelationship to resource with input %s", input), + e); + } + }, + this.getClass().getSimpleName(), + "get"); + } +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java new file mode 100644 index 00000000000000..50a7b7f895fe6b --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java @@ -0,0 +1,196 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship.mappers; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.GlobalTags; +import com.linkedin.common.GlossaryTerms; +import com.linkedin.common.InstitutionalMemory; +import com.linkedin.common.Ownership; +import com.linkedin.common.Status; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.ERModelRelationship; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.RelationshipFieldMapping; +import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import 
com.linkedin.ermodelrelation.ERModelRelationshipProperties; +import com.linkedin.ermodelrelation.EditableERModelRelationshipProperties; +import com.linkedin.metadata.key.ERModelRelationshipKey; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** + * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. + * + *

To be replaced by auto-generated mappers implementations + */ +public class ERModelRelationMapper implements ModelMapper { + + public static final ERModelRelationMapper INSTANCE = new ERModelRelationMapper(); + + public static ERModelRelationship map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public ERModelRelationship apply( + @Nullable final QueryContext context, final EntityResponse entityResponse) { + final ERModelRelationship result = new ERModelRelationship(); + final Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityUrn.toString()); + result.setType(EntityType.ER_MODEL_RELATIONSHIP); + + final EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ER_MODEL_RELATIONSHIP_KEY_ASPECT_NAME, this::mapERModelRelationKey); + mappingHelper.mapToResult( + context, ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, this::mapProperties); + if (aspectMap != null + && aspectMap.containsKey(EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)) { + mappingHelper.mapToResult( + EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + } + if (aspectMap != null && aspectMap.containsKey(INSTITUTIONAL_MEMORY_ASPECT_NAME)) { + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (ermodelrelation, dataMap) -> + ermodelrelation.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + } + if (aspectMap != null && aspectMap.containsKey(OWNERSHIP_ASPECT_NAME)) { + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (ermodelrelation, dataMap) -> + ermodelrelation.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + } + if (aspectMap != null && aspectMap.containsKey(STATUS_ASPECT_NAME)) { + mappingHelper.mapToResult( + 
STATUS_ASPECT_NAME, + (ermodelrelation, dataMap) -> + ermodelrelation.setStatus(StatusMapper.map(context, new Status(dataMap)))); + } + if (aspectMap != null && aspectMap.containsKey(GLOBAL_TAGS_ASPECT_NAME)) { + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (ermodelrelation, dataMap) -> + this.mapGlobalTags(context, ermodelrelation, dataMap, entityUrn)); + } + if (aspectMap != null && aspectMap.containsKey(GLOSSARY_TERMS_ASPECT_NAME)) { + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (ermodelrelation, dataMap) -> + ermodelrelation.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + } + return mappingHelper.getResult(); + } + + private void mapEditableProperties( + @Nonnull ERModelRelationship ermodelrelation, @Nonnull DataMap dataMap) { + final EditableERModelRelationshipProperties editableERModelRelationProperties = + new EditableERModelRelationshipProperties(dataMap); + ermodelrelation.setEditableProperties( + com.linkedin.datahub.graphql.generated.ERModelRelationshipEditableProperties.builder() + .setDescription(editableERModelRelationProperties.getDescription()) + .setName(editableERModelRelationProperties.getName()) + .build()); + } + + private void mapERModelRelationKey( + @Nonnull ERModelRelationship ermodelrelation, @Nonnull DataMap datamap) { + ERModelRelationshipKey ermodelrelationKey = new ERModelRelationshipKey(datamap); + ermodelrelation.setId(ermodelrelationKey.getId()); + } + + private void mapProperties( + @Nullable final QueryContext context, + @Nonnull ERModelRelationship ermodelrelation, + @Nonnull DataMap dataMap) { + final ERModelRelationshipProperties ermodelrelationProperties = + new ERModelRelationshipProperties(dataMap); + ermodelrelation.setProperties( + com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties.builder() + .setName(ermodelrelationProperties.getName()) + .setSource(createPartialDataset(ermodelrelationProperties.getSource())) + 
.setDestination(createPartialDataset(ermodelrelationProperties.getDestination())) + .setCreatedTime( + ermodelrelationProperties.hasCreated() + && ermodelrelationProperties.getCreated().getTime() > 0 + ? ermodelrelationProperties.getCreated().getTime() + : 0) + .setRelationshipFieldMappings( + ermodelrelationProperties.hasRelationshipFieldMappings() + ? this.mapERModelRelationFieldMappings(ermodelrelationProperties) + : null) + .build()); + + if (ermodelrelationProperties.hasCreated() + && Objects.requireNonNull(ermodelrelationProperties.getCreated()).hasActor()) { + ermodelrelation + .getProperties() + .setCreatedActor( + UrnToEntityMapper.map(context, ermodelrelationProperties.getCreated().getActor())); + } + } + + private Dataset createPartialDataset(@Nonnull Urn datasetUrn) { + + Dataset partialDataset = new Dataset(); + + partialDataset.setUrn(datasetUrn.toString()); + + return partialDataset; + } + + private List mapERModelRelationFieldMappings( + ERModelRelationshipProperties ermodelrelationProperties) { + final List relationshipFieldMappingList = new ArrayList<>(); + + ermodelrelationProperties + .getRelationshipFieldMappings() + .forEach( + relationshipFieldMapping -> + relationshipFieldMappingList.add( + this.mapRelationshipFieldMappings(relationshipFieldMapping))); + + return relationshipFieldMappingList; + } + + private com.linkedin.datahub.graphql.generated.RelationshipFieldMapping + mapRelationshipFieldMappings( + com.linkedin.ermodelrelation.RelationshipFieldMapping relationFieldMapping) { + return com.linkedin.datahub.graphql.generated.RelationshipFieldMapping.builder() + .setDestinationField(relationFieldMapping.getDestinationField()) + .setSourceField(relationFieldMapping.getSourceField()) + .build(); + } + + private void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull ERModelRelationship ermodelrelation, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = 
+ GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); + ermodelrelation.setTags(globalTags); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java new file mode 100644 index 00000000000000..d18a3e741c4336 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java @@ -0,0 +1,194 @@ +package com.linkedin.datahub.graphql.types.ermodelrelationship.mappers; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.DatasetUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipEditablePropertiesUpdate; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipPropertiesInput; +import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; +import com.linkedin.datahub.graphql.generated.RelationshipFieldMappingInput; +import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; +import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; +import com.linkedin.ermodelrelation.ERModelRelationshipCardinality; +import com.linkedin.ermodelrelation.ERModelRelationshipProperties; +import com.linkedin.ermodelrelation.EditableERModelRelationshipProperties; +import com.linkedin.ermodelrelation.RelationshipFieldMappingArray; +import com.linkedin.mxe.MetadataChangeProposal; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import 
java.util.concurrent.atomic.AtomicInteger; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class ERModelRelationshipUpdateInputMapper + implements InputModelMapper< + ERModelRelationshipUpdateInput, Collection, Urn> { + public static final ERModelRelationshipUpdateInputMapper INSTANCE = + new ERModelRelationshipUpdateInputMapper(); + + public static Collection map( + @Nullable final QueryContext context, + @Nonnull final ERModelRelationshipUpdateInput ermodelrelationUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, ermodelrelationUpdateInput, actor); + } + + @Override + public Collection apply( + @Nullable final QueryContext context, ERModelRelationshipUpdateInput input, Urn actor) { + final Collection proposals = new ArrayList<>(8); + final UpdateMappingHelper updateMappingHelper = + new UpdateMappingHelper(ER_MODEL_RELATIONSHIP_ENTITY_NAME); + final long currentTime = System.currentTimeMillis(); + final AuditStamp auditstamp = new AuditStamp(); + auditstamp.setActor(actor, SetMode.IGNORE_NULL); + auditstamp.setTime(currentTime); + if (input.getProperties() != null) { + com.linkedin.ermodelrelation.ERModelRelationshipProperties ermodelrelationProperties = + createERModelRelationProperties(input.getProperties(), auditstamp); + proposals.add( + updateMappingHelper.aspectToProposal( + ermodelrelationProperties, ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)); + } + if (input.getEditableProperties() != null) { + final EditableERModelRelationshipProperties editableERModelRelationProperties = + ermodelrelationshipEditablePropsSettings(input.getEditableProperties()); + proposals.add( + updateMappingHelper.aspectToProposal( + editableERModelRelationProperties, + EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)); + } + return proposals; + } + + private ERModelRelationshipProperties createERModelRelationProperties( + ERModelRelationshipPropertiesInput inputProperties, AuditStamp auditstamp) { + 
com.linkedin.ermodelrelation.ERModelRelationshipProperties ermodelrelationProperties =
+        new com.linkedin.ermodelrelation.ERModelRelationshipProperties();
+    if (inputProperties.getName() != null) {
+      ermodelrelationProperties.setName(inputProperties.getName());
+    }
+    try {
+      if (inputProperties.getSource() != null) {
+        ermodelrelationProperties.setSource(
+            DatasetUrn.createFromString(inputProperties.getSource()));
+      }
+      if (inputProperties.getDestination() != null) {
+        ermodelrelationProperties.setDestination(
+            DatasetUrn.createFromString(inputProperties.getDestination()));
+      }
+    } catch (URISyntaxException e) {
+      // Was e.printStackTrace(): the aspect would have been silently emitted without its
+      // source/destination. Fail fast instead, consistent with the createdBy handling below.
+      throw new RuntimeException("Invalid dataset urn in erModelRelationship properties", e);
+    }
+
+    if (inputProperties.getRelationshipFieldmappings() != null) {
+      if (!inputProperties.getRelationshipFieldmappings().isEmpty()) {
+        // Cardinality is derived from the field mappings, so both are set together.
+        com.linkedin.ermodelrelation.RelationshipFieldMappingArray relationshipFieldMappingsArray =
+            ermodelrelationFieldMappingSettings(inputProperties.getRelationshipFieldmappings());
+        ermodelrelationProperties.setCardinality(
+            ermodelrelationCardinalitySettings(inputProperties.getRelationshipFieldmappings()));
+        ermodelrelationProperties.setRelationshipFieldMappings(relationshipFieldMappingsArray);
+      }
+
+      // created == true means "stamp with the current actor/time"; otherwise an explicit
+      // createdBy/createdAt pair (when both provided) is honored.
+      if (inputProperties.getCreated() != null && inputProperties.getCreated()) {
+        ermodelrelationProperties.setCreated(auditstamp);
+      } else {
+        if (inputProperties.getCreatedBy() != null && inputProperties.getCreatedAt() != 0) {
+          final AuditStamp auditstampEdit = new AuditStamp();
+          try {
+            auditstampEdit.setActor(Urn.createFromString(inputProperties.getCreatedBy()));
+          } catch (URISyntaxException e) {
+            throw new RuntimeException(e);
+          }
+          auditstampEdit.setTime(inputProperties.getCreatedAt());
+          ermodelrelationProperties.setCreated(auditstampEdit);
+        }
+      }
+      ermodelrelationProperties.setLastModified(auditstamp);
+    }
+    return ermodelrelationProperties;
+  }
+
+  private com.linkedin.ermodelrelation.ERModelRelationshipCardinality
+      ermodelrelationCardinalitySettings(
+          List
ermodelrelationFieldMapping) { + + Set sourceFields = new HashSet<>(); + Set destFields = new HashSet<>(); + AtomicInteger sourceCount = new AtomicInteger(); + AtomicInteger destCount = new AtomicInteger(); + + ermodelrelationFieldMapping.forEach( + relationshipFieldMappingInput -> { + sourceFields.add(relationshipFieldMappingInput.getSourceField()); + sourceCount.getAndIncrement(); + destFields.add(relationshipFieldMappingInput.getDestinationField()); + destCount.getAndIncrement(); + }); + + if (sourceFields.size() == sourceCount.get()) { + if (destFields.size() == destCount.get()) { + return ERModelRelationshipCardinality.ONE_ONE; + } else { + return ERModelRelationshipCardinality.N_ONE; + } + } else { + if (destFields.size() == destCount.get()) { + return ERModelRelationshipCardinality.ONE_N; + } else { + return ERModelRelationshipCardinality.N_N; + } + } + } + + private com.linkedin.ermodelrelation.RelationshipFieldMappingArray + ermodelrelationFieldMappingSettings( + List ermodelrelationFieldMapping) { + + List relationshipFieldMappingList = + this.mapRelationshipFieldMapping(ermodelrelationFieldMapping); + + return new RelationshipFieldMappingArray(relationshipFieldMappingList); + } + + private List mapRelationshipFieldMapping( + List ermodelrelationFieldMapping) { + + List relationshipFieldMappingList = + new ArrayList<>(); + + ermodelrelationFieldMapping.forEach( + relationshipFieldMappingInput -> { + com.linkedin.ermodelrelation.RelationshipFieldMapping relationshipFieldMapping = + new com.linkedin.ermodelrelation.RelationshipFieldMapping(); + relationshipFieldMapping.setSourceField(relationshipFieldMappingInput.getSourceField()); + relationshipFieldMapping.setDestinationField( + relationshipFieldMappingInput.getDestinationField()); + relationshipFieldMappingList.add(relationshipFieldMapping); + }); + + return relationshipFieldMappingList; + } + + private static EditableERModelRelationshipProperties ermodelrelationshipEditablePropsSettings( + 
ERModelRelationshipEditablePropertiesUpdate editPropsInput) { + final EditableERModelRelationshipProperties editableERModelRelationProperties = + new EditableERModelRelationshipProperties(); + if (editPropsInput.getName() != null && editPropsInput.getName().trim().length() > 0) { + editableERModelRelationProperties.setName(editPropsInput.getName()); + } + if (editPropsInput.getDescription() != null + && editPropsInput.getDescription().trim().length() > 0) { + editableERModelRelationProperties.setDescription(editPropsInput.getDescription()); + } + return editableERModelRelationProperties; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java new file mode 100644 index 00000000000000..4f2ae014995de0 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java @@ -0,0 +1,132 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.linkedin.common.Ownership; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import com.linkedin.datahub.graphql.generated.FormInfo; +import com.linkedin.datahub.graphql.generated.FormPrompt; +import com.linkedin.datahub.graphql.generated.FormPromptType; +import com.linkedin.datahub.graphql.generated.FormType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import 
com.linkedin.datahub.graphql.generated.StructuredPropertyParams; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class FormMapper implements ModelMapper { + + public static final FormMapper INSTANCE = new FormMapper(); + + public static Form map(@Nullable final QueryContext context, @Nonnull final EntityResponse form) { + return INSTANCE.apply(context, form); + } + + public Form apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + Form result = new Form(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityUrn.toString()); + result.setType(EntityType.FORM); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(FORM_INFO_ASPECT_NAME, this::mapFormInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (form, dataMap) -> + form.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + + return mappingHelper.getResult(); + } + + private void mapFormInfo(@Nonnull Form form, @Nonnull DataMap dataMap) { + com.linkedin.form.FormInfo gmsFormInfo = new com.linkedin.form.FormInfo(dataMap); + FormInfo formInfo = new FormInfo(); + formInfo.setName(gmsFormInfo.getName()); + formInfo.setType(FormType.valueOf(gmsFormInfo.getType().toString())); + if (gmsFormInfo.hasDescription()) { + formInfo.setDescription(gmsFormInfo.getDescription()); + } + formInfo.setPrompts(this.mapFormPrompts(gmsFormInfo, form.getUrn())); + 
formInfo.setActors(mapFormActors(gmsFormInfo.getActors())); + form.setInfo(formInfo); + } + + private List mapFormPrompts( + @Nonnull com.linkedin.form.FormInfo gmsFormInfo, @Nonnull String formUrn) { + List formPrompts = new ArrayList<>(); + if (gmsFormInfo.hasPrompts()) { + gmsFormInfo + .getPrompts() + .forEach(FormPrompt -> formPrompts.add(mapFormPrompt(FormPrompt, formUrn))); + } + return formPrompts; + } + + private FormPrompt mapFormPrompt( + @Nonnull com.linkedin.form.FormPrompt gmsFormPrompt, @Nonnull String formUrn) { + final FormPrompt formPrompt = new FormPrompt(); + formPrompt.setId(gmsFormPrompt.getId()); + formPrompt.setTitle(gmsFormPrompt.getTitle()); + formPrompt.setType(FormPromptType.valueOf(gmsFormPrompt.getType().toString())); + formPrompt.setRequired(gmsFormPrompt.isRequired()); + formPrompt.setFormUrn(formUrn); + if (gmsFormPrompt.hasDescription()) { + formPrompt.setDescription(gmsFormPrompt.getDescription()); + } + + if (gmsFormPrompt.hasStructuredPropertyParams()) { + final StructuredPropertyParams params = new StructuredPropertyParams(); + final Urn structuredPropUrn = gmsFormPrompt.getStructuredPropertyParams().getUrn(); + final StructuredPropertyEntity structuredProp = new StructuredPropertyEntity(); + structuredProp.setUrn(structuredPropUrn.toString()); + structuredProp.setType(EntityType.STRUCTURED_PROPERTY); + params.setStructuredProperty(structuredProp); + formPrompt.setStructuredPropertyParams(params); + } + + return formPrompt; + } + + private FormActorAssignment mapFormActors(com.linkedin.form.FormActorAssignment gmsFormActors) { + FormActorAssignment result = new FormActorAssignment(); + result.setOwners(gmsFormActors.isOwners()); + if (gmsFormActors.hasUsers()) { + result.setUsers( + gmsFormActors.getUsers().stream().map(this::mapUser).collect(Collectors.toList())); + } + if (gmsFormActors.hasGroups()) { + result.setGroups( + gmsFormActors.getGroups().stream().map(this::mapGroup).collect(Collectors.toList())); + } + return 
result; + } + + private CorpUser mapUser(Urn userUrn) { + CorpUser user = new CorpUser(); + user.setUrn(userUrn.toString()); + return user; + } + + private CorpGroup mapGroup(Urn groupUrn) { + CorpGroup group = new CorpGroup(); + group.setUrn(groupUrn.toString()); + return group; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java new file mode 100644 index 00000000000000..fb57b1911a85e6 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.form; + +import static com.linkedin.metadata.Constants.FORM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.FORM_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class FormType implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(FORM_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + return 
EntityType.FORM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Form.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List formUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + FORM_ENTITY_NAME, + new HashSet<>(formUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : formUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(FormMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Forms", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java new file mode 100644 index 00000000000000..43665b37b9ee87 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormsMapper.java @@ -0,0 +1,133 @@ +package com.linkedin.datahub.graphql.types.form; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.FieldFormPromptAssociationArray; +import com.linkedin.common.FormPromptAssociationArray; +import com.linkedin.common.Forms; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FieldFormPromptAssociation; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormAssociation; +import com.linkedin.datahub.graphql.generated.FormPromptAssociation; +import 
com.linkedin.datahub.graphql.generated.FormPromptFieldAssociations; +import com.linkedin.datahub.graphql.generated.FormVerificationAssociation; +import com.linkedin.datahub.graphql.generated.ResolvedAuditStamp; +import java.util.ArrayList; +import java.util.List; +import javax.annotation.Nonnull; + +public class FormsMapper { + + public static final FormsMapper INSTANCE = new FormsMapper(); + + public static com.linkedin.datahub.graphql.generated.Forms map( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + return INSTANCE.apply(forms, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Forms apply( + @Nonnull final Forms forms, @Nonnull final String entityUrn) { + final List incompleteForms = new ArrayList<>(); + forms + .getIncompleteForms() + .forEach( + formAssociation -> + incompleteForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List completeForms = new ArrayList<>(); + forms + .getCompletedForms() + .forEach( + formAssociation -> + completeForms.add(this.mapFormAssociation(formAssociation, entityUrn))); + final List verifications = new ArrayList<>(); + forms + .getVerifications() + .forEach( + verificationAssociation -> + verifications.add(this.mapVerificationAssociation(verificationAssociation))); + + return new com.linkedin.datahub.graphql.generated.Forms( + incompleteForms, completeForms, verifications); + } + + private FormAssociation mapFormAssociation( + @Nonnull final com.linkedin.common.FormAssociation association, + @Nonnull final String entityUrn) { + FormAssociation result = new FormAssociation(); + result.setForm( + Form.builder().setType(EntityType.FORM).setUrn(association.getUrn().toString()).build()); + result.setAssociatedUrn(entityUrn); + result.setCompletedPrompts(this.mapPrompts(association.getCompletedPrompts())); + result.setIncompletePrompts(this.mapPrompts(association.getIncompletePrompts())); + return result; + } + + private FormVerificationAssociation mapVerificationAssociation( 
+ @Nonnull final com.linkedin.common.FormVerificationAssociation verificationAssociation) { + FormVerificationAssociation result = new FormVerificationAssociation(); + result.setForm( + Form.builder() + .setType(EntityType.FORM) + .setUrn(verificationAssociation.getForm().toString()) + .build()); + if (verificationAssociation.hasLastModified()) { + result.setLastModified(createAuditStamp(verificationAssociation.getLastModified())); + } + return result; + } + + private List mapPrompts( + @Nonnull final FormPromptAssociationArray promptAssociations) { + List result = new ArrayList<>(); + promptAssociations.forEach( + promptAssociation -> { + FormPromptAssociation association = new FormPromptAssociation(); + association.setId(promptAssociation.getId()); + association.setLastModified(createAuditStamp(promptAssociation.getLastModified())); + if (promptAssociation.hasFieldAssociations()) { + association.setFieldAssociations( + mapFieldAssociations(promptAssociation.getFieldAssociations())); + } + result.add(association); + }); + return result; + } + + private List mapFieldPrompts( + @Nonnull final FieldFormPromptAssociationArray fieldPromptAssociations) { + List result = new ArrayList<>(); + fieldPromptAssociations.forEach( + fieldFormPromptAssociation -> { + FieldFormPromptAssociation association = new FieldFormPromptAssociation(); + association.setFieldPath(fieldFormPromptAssociation.getFieldPath()); + association.setLastModified( + createAuditStamp(fieldFormPromptAssociation.getLastModified())); + result.add(association); + }); + return result; + } + + private FormPromptFieldAssociations mapFieldAssociations( + com.linkedin.common.FormPromptFieldAssociations associationsObj) { + final FormPromptFieldAssociations fieldAssociations = new FormPromptFieldAssociations(); + if (associationsObj.hasCompletedFieldPrompts()) { + fieldAssociations.setCompletedFieldPrompts( + this.mapFieldPrompts(associationsObj.getCompletedFieldPrompts())); + } + if 
(associationsObj.hasIncompleteFieldPrompts()) { + fieldAssociations.setIncompleteFieldPrompts( + this.mapFieldPrompts(associationsObj.getIncompleteFieldPrompts())); + } + return fieldAssociations; + } + + private ResolvedAuditStamp createAuditStamp(AuditStamp auditStamp) { + final ResolvedAuditStamp resolvedAuditStamp = new ResolvedAuditStamp(); + final CorpUser emptyCreatedUser = new CorpUser(); + emptyCreatedUser.setUrn(auditStamp.getActor().toString()); + resolvedAuditStamp.setActor(emptyCreatedUser); + resolvedAuditStamp.setTime(auditStamp.getTime()); + return resolvedAuditStamp; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index f2c9e962811b90..cec14da4a22958 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -1,17 +1,23 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.EntityType; +import 
com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryNodeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -20,18 +26,16 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - -public class GlossaryNodeType implements com.linkedin.datahub.graphql.types.EntityType { +public class GlossaryNodeType + implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_NODE_KEY_ASPECT_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME - ); + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_NODE_KEY_ASPECT_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); private final EntityClient _entityClient; @@ -55,25 +59,31 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(final List urns, final QueryContext context) { - final List glossaryNodeUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + final List urns, final QueryContext context) { + final List glossaryNodeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map glossaryNodeMap = _entityClient.batchGetV2(GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(glossaryNodeUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map glossaryNodeMap = + _entityClient.batchGetV2( + 
context.getOperationContext(), + GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(glossaryNodeUrns), + ASPECTS_TO_RESOLVE); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsGlossaryNode -> - gmsGlossaryNode == null ? null - : DataFetcherResult.newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) - .build()) + .map( + gmsGlossaryNode -> + gmsGlossaryNode == null + ? null + : DataFetcherResult.newResult() + .data(GlossaryNodeMapper.map(context, gmsGlossaryNode)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load GlossaryNodes", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index 3574c17a50923f..087c09eff671d0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,7 +29,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.ArrayList; @@ -39,118 +41,141 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; 
-import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class GlossaryTermType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of(""); - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_TERM_KEY_ASPECT_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, - GLOSSARY_RELATED_TERM_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - BROWSE_PATHS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME - ); - - private final EntityClient _entityClient; - - public GlossaryTermType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return GlossaryTerm.class; - } - - @Override - public EntityType type() { - return EntityType.GLOSSARY_TERM; +public class GlossaryTermType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of(""); + + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_TERM_KEY_ASPECT_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + GLOSSARY_RELATED_TERM_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + BROWSE_PATHS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + STRUCTURED_PROPERTIES_ASPECT_NAME, + FORMS_ASPECT_NAME); + + private final EntityClient _entityClient; + + public GlossaryTermType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return GlossaryTerm.class; + } + + @Override + public EntityType type() { + return EntityType.GLOSSARY_TERM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + final List glossaryTermUrns = + 
urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map glossaryTermMap = + _entityClient.batchGetV2( + context.getOperationContext(), + GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(glossaryTermUrns), + ASPECTS_TO_RESOLVE); + + final List gmsResults = new ArrayList<>(urns.size()); + for (Urn urn : glossaryTermUrns) { + gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsGlossaryTerm -> + gmsGlossaryTerm == null + ? null + : DataFetcherResult.newResult() + .data(GlossaryTermMapper.map(context, gmsGlossaryTerm)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load GlossaryTerms", e); } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - final List glossaryTermUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map glossaryTermMap = _entityClient.batchGetV2(GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(glossaryTermUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : glossaryTermUrns) { - gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsGlossaryTerm -> - gmsGlossaryTerm == null ? 
null - : DataFetcherResult.newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load GlossaryTerms", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "glossaryTerm", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( - "glossaryTerm", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "glossaryTerm", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } - + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "glossaryTerm", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "glossaryTerm", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "glossaryTerm", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + context.getOperationContext(), GlossaryTermUtils.getGlossaryTermUrn(urn)); + return BrowsePathsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java index 93b6ab53d5a3ac..59f7cc8a9c8284 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java @@ -1,27 +1,27 @@ package com.linkedin.datahub.graphql.types.glossary; import com.linkedin.common.urn.GlossaryTermUrn; - import java.net.URISyntaxException; import java.util.regex.Pattern; public class GlossaryTermUtils { - private GlossaryTermUtils() { } + private GlossaryTermUtils() {} - static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { - try { - return GlossaryTermUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); - } + static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { + try { + return GlossaryTermUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); } + } - public static String getGlossaryTermName(String 
hierarchicalName) { - if (hierarchicalName.contains(".")) { - String[] nodes = hierarchicalName.split(Pattern.quote(".")); - return nodes[nodes.length - 1]; - } - return hierarchicalName; + public static String getGlossaryTermName(String hierarchicalName) { + if (hierarchicalName.contains(".")) { + String[] nodes = hierarchicalName.split(Pattern.quote(".")); + return nodes[nodes.length - 1]; } + return hierarchicalName; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 6a1d849dd23bf5..4912d18614f415 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,33 +1,43 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.Forms; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryNodeProperties; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import 
com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.key.GlossaryNodeKey; - +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; +import javax.annotation.Nullable; public class GlossaryNodeMapper implements ModelMapper { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); - public static GlossaryNode map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static GlossaryNode map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { + public GlossaryNode apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { GlossaryNode result = new GlossaryNode(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.GLOSSARY_NODE); @@ -35,22 +45,45 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_NODE_INFO_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + mappingHelper.mapToResult( + GLOSSARY_NODE_INFO_ASPECT_NAME, + (glossaryNode, dataMap) -> + glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap, entityUrn))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + 
mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryNode, dataMap) -> + glossaryNode.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), GlossaryNode.class); + } else { + return mappingHelper.getResult(); + } } - private GlossaryNodeProperties mapGlossaryNodeProperties(@Nonnull DataMap dataMap) { + private GlossaryNodeProperties mapGlossaryNodeProperties( + @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { GlossaryNodeInfo glossaryNodeInfo = new GlossaryNodeInfo(dataMap); GlossaryNodeProperties result = new GlossaryNodeProperties(); result.setDescription(glossaryNodeInfo.getDefinition()); if (glossaryNodeInfo.hasName()) { result.setName(glossaryNodeInfo.getName()); } + if (glossaryNodeInfo.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryNodeInfo.getCustomProperties(), entityUrn)); + } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java index 2f99700bc30a14..12ba8c1e088f09 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java @@ -1,41 +1,44 @@ 
package com.linkedin.datahub.graphql.types.glossary.mappers; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.GlossaryTermInfo; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermInfoMapper { - public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); + public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); - public static GlossaryTermInfo map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - return INSTANCE.apply(glossaryTermInfo, entityUrn); - } + public static GlossaryTermInfo map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + return INSTANCE.apply(glossaryTermInfo, entityUrn); + } - public GlossaryTermInfo apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); - glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); - if (glossaryTermInfo.hasName()) { - glossaryTermInfoResult.setName(glossaryTermInfo.getName()); - } - if (glossaryTermInfo.hasSourceRef()) { - glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); - } - if (glossaryTermInfo.hasSourceUrl()) { - glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); - } - if (glossaryTermInfo.hasCustomProperties()) { - glossaryTermInfoResult.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); - } - return glossaryTermInfoResult; + public GlossaryTermInfo apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = + new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); + 
glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); + if (glossaryTermInfo.hasName()) { + glossaryTermInfoResult.setName(glossaryTermInfo.getName()); + } + if (glossaryTermInfo.hasSourceRef()) { + glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); + } + if (glossaryTermInfo.hasSourceUrl()) { + glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); + } + if (glossaryTermInfo.hasCustomProperties()) { + glossaryTermInfoResult.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } + return glossaryTermInfoResult; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index c98177b458dea3..1274646f45ec49 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -1,11 +1,17 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import 
com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; @@ -13,73 +19,108 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; +import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.key.GlossaryTermKey; -import com.linkedin.domain.Domains; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermMapper implements ModelMapper { - public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); + public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); - public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static GlossaryTerm map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - @Override - public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { - GlossaryTerm result = new GlossaryTerm(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public GlossaryTerm apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + GlossaryTerm result = new GlossaryTerm(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.GLOSSARY_TERM); - final String legacyName = GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.GLOSSARY_TERM); + final String legacyName = + GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setGlossaryTermInfo(GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setProperties(GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - 
mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setGlossaryTermInfo( + GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setProperties( + GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + 
StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - // If there's no name property, resort to the legacy name computation. - if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { - result.getGlossaryTermInfo().setName(legacyName); - } - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } + // If there's no name property, resort to the legacy name computation. + if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { + result.getGlossaryTermInfo().setName(legacyName); + } + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); + } + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), GlossaryTerm.class); + } else { return mappingHelper.getResult(); } + } - private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); - glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); - glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); - } + private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); + glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); + glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + } - private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - 
glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); - } + private void mapDomains( + @Nullable QueryContext context, + @Nonnull GlossaryTerm glossaryTerm, + @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + glossaryTerm.setDomain(DomainAssociationMapper.map(context, domains, glossaryTerm.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java index 6b358331833937..94edfcbd314552 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java @@ -2,25 +2,27 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GlossaryTermProperties; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermPropertiesMapper { public static final GlossaryTermPropertiesMapper INSTANCE = new GlossaryTermPropertiesMapper(); - public static GlossaryTermProperties map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + public static GlossaryTermProperties map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { return INSTANCE.apply(glossaryTermInfo, entityUrn); } - public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); + public GlossaryTermProperties apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = + new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); result.setDefinition(glossaryTermInfo.getDefinition()); result.setDescription(glossaryTermInfo.getDefinition()); result.setTermSource(glossaryTermInfo.getTermSource()); @@ -34,7 +36,8 @@ public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.Glossar result.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); } if (glossaryTermInfo.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index a64b0f7dc64fbe..705b924d208ffb 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -1,51 +1,72 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; -import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; -import java.util.stream.Collectors; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; -import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.GlossaryTerms; import com.linkedin.common.GlossaryTermAssociation; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; +import com.linkedin.datahub.graphql.generated.GlossaryTerms; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermsMapper { - public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); + public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); - public static GlossaryTerms map( - @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(glossaryTerms, entityUrn); - } + public static GlossaryTerms map( + @Nonnull final QueryContext context, + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, glossaryTerms, entityUrn); + } - public GlossaryTerms apply(@Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms(); - result.setTerms(glossaryTerms.getTerms().stream().map( - association -> this.mapGlossaryTermAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } + public GlossaryTerms apply( + @Nonnull final QueryContext context, + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTerms result = + new com.linkedin.datahub.graphql.generated.GlossaryTerms(); + result.setTerms( + glossaryTerms.getTerms().stream() + .filter( + association -> + context == null || canView(context.getOperationContext(), association.getUrn())) + .map(association -> this.mapGlossaryTermAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } - private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( - @Nonnull final GlossaryTermAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new 
com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); - final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); - resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); - resultGlossaryTerm.setUrn(input.getUrn().toString()); - resultGlossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); - result.setTerm(resultGlossaryTerm); - result.setAssociatedUrn(entityUrn.toString()); - return result; + private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( + @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = + new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); + + final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); + resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); + resultGlossaryTerm.setUrn(input.getUrn().toString()); + resultGlossaryTerm.setName( + GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); + result.setTerm(resultGlossaryTerm); + + if (input.hasActor()) { + CorpUser actor = new CorpUser(); + actor.setUrn(input.getActor().toString()); + actor.setType(EntityType.CORP_USER); + result.setActor(actor); + } + if (entityUrn != null) { + result.setAssociatedUrn(entityUrn.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java new file mode 100644 index 00000000000000..f39549fdc6eeda --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java @@ -0,0 +1,88 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; + +import com.linkedin.common.GlobalTags; +import com.linkedin.common.urn.Urn; +import 
com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.datahub.graphql.generated.IncidentSource; +import com.linkedin.datahub.graphql.generated.IncidentSourceType; +import com.linkedin.datahub.graphql.generated.IncidentState; +import com.linkedin.datahub.graphql.generated.IncidentStatus; +import com.linkedin.datahub.graphql.generated.IncidentType; +import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.metadata.Constants; +import javax.annotation.Nullable; + +/** Maps a GMS {@link EntityResponse} to a GraphQL incident. */ +public class IncidentMapper { + + public static Incident map(@Nullable QueryContext context, final EntityResponse entityResponse) { + final Incident result = new Incident(); + final Urn entityUrn = entityResponse.getUrn(); + final EnvelopedAspectMap aspects = entityResponse.getAspects(); + result.setType(EntityType.INCIDENT); + result.setUrn(entityUrn.toString()); + + final EnvelopedAspect envelopedIncidentInfo = aspects.get(Constants.INCIDENT_INFO_ASPECT_NAME); + if (envelopedIncidentInfo != null) { + final IncidentInfo info = new IncidentInfo(envelopedIncidentInfo.getValue().data()); + // Assumption alert! 
This assumes the incident type in GMS exactly equals that in GraphQL + result.setIncidentType(IncidentType.valueOf(info.getType().name())); + result.setCustomType(info.getCustomType(GetMode.NULL)); + result.setTitle(info.getTitle(GetMode.NULL)); + result.setDescription(info.getDescription(GetMode.NULL)); + result.setPriority(info.getPriority(GetMode.NULL)); + // TODO: Support multiple entities per incident. + result.setEntity(UrnToEntityMapper.map(context, info.getEntities().get(0))); + if (info.hasSource()) { + result.setSource(mapIncidentSource(context, info.getSource())); + } + if (info.hasStatus()) { + result.setStatus(mapStatus(context, info.getStatus())); + } + result.setCreated(AuditStampMapper.map(context, info.getCreated())); + } else { + throw new RuntimeException(String.format("Incident does not exist!. urn: %s", entityUrn)); + } + + final EnvelopedAspect envelopedTags = aspects.get(GLOBAL_TAGS_ASPECT_NAME); + if (envelopedTags != null) { + result.setTags( + GlobalTagsMapper.map( + context, new GlobalTags(envelopedTags.getValue().data()), entityUrn)); + } + + return result; + } + + private static IncidentStatus mapStatus( + @Nullable QueryContext context, final com.linkedin.incident.IncidentStatus incidentStatus) { + final IncidentStatus result = new IncidentStatus(); + result.setState(IncidentState.valueOf(incidentStatus.getState().name())); + result.setMessage(incidentStatus.getMessage(GetMode.NULL)); + result.setLastUpdated(AuditStampMapper.map(context, incidentStatus.getLastUpdated())); + return result; + } + + private static IncidentSource mapIncidentSource( + @Nullable QueryContext context, final com.linkedin.incident.IncidentSource incidentSource) { + final IncidentSource result = new IncidentSource(); + result.setType(IncidentSourceType.valueOf(incidentSource.getType().name())); + if (incidentSource.hasSourceUrn()) { + result.setSource(UrnToEntityMapper.map(context, incidentSource.getSourceUrn())); + } + return result; + } + + private 
IncidentMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java new file mode 100644 index 00000000000000..780a19e8fae186 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java @@ -0,0 +1,87 @@ +package com.linkedin.datahub.graphql.types.incident; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import graphql.execution.DataFetcherResult; +import java.net.URISyntaxException; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + +public class IncidentType + implements com.linkedin.datahub.graphql.types.EntityType { + + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(Constants.INCIDENT_INFO_ASPECT_NAME, Constants.GLOBAL_TAGS_ASPECT_NAME); + private final EntityClient _entityClient; + + public IncidentType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.INCIDENT; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Incident.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List incidentUrns = 
urns.stream().map(this::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.INCIDENT_ENTITY_NAME, + new HashSet<>(incidentUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : incidentUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(IncidentMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Incidents", e); + } + } + + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java new file mode 100644 index 00000000000000..2c5e84dad28c21 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ingest/secret/mapper/DataHubSecretValueMapper.java @@ -0,0 +1,55 @@ +package com.linkedin.datahub.graphql.types.ingest.secret.mapper; + +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; + +import com.linkedin.common.AuditStamp; +import com.linkedin.data.template.RecordTemplate; +import com.linkedin.data.template.SetMode; +import com.linkedin.entity.EntityResponse; +import com.linkedin.secret.DataHubSecretValue; +import java.util.Objects; +import javax.annotation.Nonnull; + +/** + * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. + * + *

To be replaced by auto-generated mappers implementations + */ +public class DataHubSecretValueMapper { + + public static final DataHubSecretValueMapper INSTANCE = new DataHubSecretValueMapper(); + + public static DataHubSecretValue map( + EntityResponse fromSecret, + @Nonnull final String name, + @Nonnull final String value, + String description, + AuditStamp auditStamp) { + return INSTANCE.apply(fromSecret, name, value, description, auditStamp); + } + + public DataHubSecretValue apply( + EntityResponse existingSecret, + @Nonnull final String name, + @Nonnull final String value, + String description, + AuditStamp auditStamp) { + final DataHubSecretValue result; + if (Objects.nonNull(existingSecret)) { + result = + new DataHubSecretValue( + existingSecret.getAspects().get(SECRET_VALUE_ASPECT_NAME).getValue().data()); + } else { + result = new DataHubSecretValue(); + } + + result.setName(name); + result.setValue(value); + result.setDescription(description, SetMode.IGNORE_NULL); + if (Objects.nonNull(auditStamp)) { + result.setCreated(auditStamp); + } + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java index d575a81f4ae038..2b576230c99a23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java @@ -1,28 +1,33 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.query.AutoCompleteResult; - import java.util.stream.Collectors; import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; +public class AutoCompleteResultsMapper + implements ModelMapper { -public class AutoCompleteResultsMapper implements ModelMapper { - - public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); + public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); - public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { - return INSTANCE.apply(results); - } + public static AutoCompleteResults map( + @Nullable final QueryContext context, @Nonnull final AutoCompleteResult results) { + return INSTANCE.apply(context, results); + } - @Override - public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { - final AutoCompleteResults result = new AutoCompleteResults(); - result.setQuery(input.getQuery()); - result.setSuggestions(input.getSuggestions()); - result.setEntities(input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect( - Collectors.toList())); - return result; - } + @Override + public AutoCompleteResults apply( + @Nullable final QueryContext context, @Nonnull final AutoCompleteResult input) { + final AutoCompleteResults result = new AutoCompleteResults(); + result.setQuery(input.getQuery()); + result.setSuggestions(input.getSuggestions()); + result.setEntities( + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(context, entity.getUrn())) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java index ea44c4409b7097..2d5deec7edb513 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java @@ -1,28 +1,30 @@ package 
com.linkedin.datahub.graphql.types.mappers; import com.linkedin.datahub.graphql.Constants; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathMapper implements ModelMapper { - public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); + public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); - public static BrowsePath map(@Nonnull final String input) { - return INSTANCE.apply(input); - } + public static BrowsePath map(@Nullable final QueryContext context, @Nonnull final String input) { + return INSTANCE.apply(context, input); + } - @Override - public BrowsePath apply(@Nonnull final String input) { - final BrowsePath browsePath = new BrowsePath(); - final List path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()); - browsePath.setPath(path); - return browsePath; - } + @Override + public BrowsePath apply(@Nullable final QueryContext context, @Nonnull final String input) { + final BrowsePath browsePath = new BrowsePath(); + final List path = + Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList()); + browsePath.setPath(path); + return browsePath; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java index 4dac4468a80d5e..bb70e1ae4b77d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java @@ -1,25 +1,28 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathsMapper implements ModelMapper, List> { - public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); + public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); - public static List map(@Nonnull final List input) { - return INSTANCE.apply(input); - } + public static List map( + @Nullable final QueryContext context, @Nonnull final List input) { + return INSTANCE.apply(context, input); + } - @Override - public List apply(@Nonnull final List input) { - List results = new ArrayList<>(); - for (String pathStr : input) { - results.add(BrowsePathMapper.map(pathStr)); - } - return results; + @Override + public List apply( + @Nullable final QueryContext context, @Nonnull final List input) { + List results = new ArrayList<>(); + for (String pathStr : input) { + results.add(BrowsePathMapper.map(context, pathStr)); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java index c3e74c28fe59dc..3c2661a80b873e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroup; 
import com.linkedin.datahub.graphql.generated.BrowseResultMetadata; import com.linkedin.datahub.graphql.generated.BrowseResults; @@ -8,13 +9,13 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; - +import javax.annotation.Nullable; public class BrowseResultMapper { - private BrowseResultMapper() { - } + private BrowseResultMapper() {} - public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) { + public static BrowseResults map( + @Nullable final QueryContext context, com.linkedin.metadata.browse.BrowseResult input) { final BrowseResults result = new BrowseResults(); if (!input.hasFrom() || !input.hasPageSize() || !input.hasNumElements()) { @@ -26,12 +27,15 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) result.setTotal(input.getNumElements()); final BrowseResultMetadata browseResultMetadata = new BrowseResultMetadata(); - browseResultMetadata.setPath(BrowsePathMapper.map(input.getMetadata().getPath()).getPath()); + browseResultMetadata.setPath( + BrowsePathMapper.map(context, input.getMetadata().getPath()).getPath()); browseResultMetadata.setTotalNumEntities(input.getMetadata().getTotalNumEntities()); result.setMetadata(browseResultMetadata); List entities = - input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(Collectors.toList()); + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(context, entity.getUrn())) + .collect(Collectors.toList()); result.setEntities(entities); List groups = @@ -41,7 +45,8 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) return result; } - private static BrowseResultGroup mapGroup(@Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { + private static BrowseResultGroup mapGroup( + @Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { final BrowseResultGroup result = new BrowseResultGroup(); 
result.setName(group.getName()); result.setCount(group.getCount()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java index e6172debb439e2..984ef0fdcf2543 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Maps an input of type I to an output of type O with actor context. - */ +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nullable; + +/** Maps an input of type I to an output of type O with actor context. */ public interface InputModelMapper { - O apply(final I input, final A actor); -} \ No newline at end of file + O apply(@Nullable final QueryContext context, final I input, final A actor); +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 5ba32b0c2a77c1..7dd12d62765c60 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -1,84 +1,116 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + +import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregationMetadata; +import com.linkedin.datahub.graphql.generated.EntityPath; import 
com.linkedin.datahub.graphql.generated.FacetMetadata; import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.SearchResult; import com.linkedin.datahub.graphql.generated.SearchSuggestion; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.metadata.entity.validation.ValidationApiUtils; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class MapperUtils { - private MapperUtils() { + private MapperUtils() {} - } - - public static SearchResult mapResult(SearchEntity searchEntity) { - return new SearchResult(UrnToEntityMapper.map(searchEntity.getEntity()), + public static SearchResult mapResult( + @Nullable final QueryContext context, SearchEntity searchEntity) { + return new SearchResult( + UrnToEntityMapper.map(context, searchEntity.getEntity()), getInsightsFromFeatures(searchEntity.getFeatures()), - getMatchedFieldEntry(searchEntity.getMatchedFields())); + getMatchedFieldEntry(context, searchEntity.getMatchedFields())); } - public static FacetMetadata mapFacet(com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { + public static FacetMetadata mapFacet( + @Nullable final QueryContext context, + com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { final FacetMetadata facetMetadata = new FacetMetadata(); - List aggregationFacets = 
List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); - List isEntityTypeFilter = aggregationFacets.stream().map( - facet -> facet.equals("entity") || facet.contains("_entityType")).collect(Collectors.toList()); + List aggregationFacets = + List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); + List isEntityTypeFilter = + aggregationFacets.stream() + .map(facet -> facet.equals("entity") || facet.contains("_entityType")) + .collect(Collectors.toList()); facetMetadata.setField(aggregationMetadata.getName()); facetMetadata.setDisplayName( - Optional.ofNullable(aggregationMetadata.getDisplayName()).orElse(aggregationMetadata.getName())); - facetMetadata.setAggregations(aggregationMetadata.getFilterValues() - .stream() - .map(filterValue -> new AggregationMetadata(convertFilterValue(filterValue.getValue(), isEntityTypeFilter), - filterValue.getFacetCount(), - filterValue.getEntity() == null ? null : UrnToEntityMapper.map(filterValue.getEntity()))) - .collect(Collectors.toList())); + Optional.ofNullable(aggregationMetadata.getDisplayName()) + .orElse(aggregationMetadata.getName())); + facetMetadata.setAggregations( + aggregationMetadata.getFilterValues().stream() + .map( + filterValue -> + new AggregationMetadata( + convertFilterValue(filterValue.getValue(), isEntityTypeFilter), + filterValue.getFacetCount(), + filterValue.getEntity() == null + ? null + : UrnToEntityMapper.map(context, filterValue.getEntity()))) + .collect(Collectors.toList())); return facetMetadata; } public static String convertFilterValue(String filterValue, List isEntityTypeFilter) { String[] aggregations = filterValue.split(AGGREGATION_SEPARATOR_CHAR); - return IntStream.range(0, aggregations.length).mapToObj( - idx -> idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) ? 
EntityTypeMapper.getType(aggregations[idx]).toString() : aggregations[idx]) + return IntStream.range(0, aggregations.length) + .mapToObj( + idx -> + idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) + ? EntityTypeMapper.getType(aggregations[idx]).toString() + : aggregations[idx]) .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } - public static List getMatchedFieldEntry(List highlightMetadata) { + public static List getMatchedFieldEntry( + @Nullable final QueryContext context, + List highlightMetadata) { return highlightMetadata.stream() - .map(field -> { - MatchedField matchedField = new MatchedField(); - matchedField.setName(field.getName()); - matchedField.setValue(field.getValue()); - if (SearchUtils.isUrn(field.getValue())) { - try { + .map( + field -> { + MatchedField matchedField = new MatchedField(); + matchedField.setName(field.getName()); + matchedField.setValue(field.getValue()); + if (SearchUtils.isUrn(field.getValue())) { + try { Urn urn = Urn.createFromString(field.getValue()); - matchedField.setEntity(UrnToEntityMapper.map(urn)); - } catch (URISyntaxException e) { - log.warn("Failed to create urn from MatchedField value: {}", field.getValue(), e); + ValidationApiUtils.validateUrn( + context.getOperationContext().getEntityRegistry(), urn); + matchedField.setEntity(UrnToEntityMapper.map(context, urn)); + } catch (IllegalArgumentException | URISyntaxException e) { + log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); + } } - } - return matchedField; - }) + return matchedField; + }) .collect(Collectors.toList()); } - public static SearchSuggestion mapSearchSuggestion(com.linkedin.metadata.search.SearchSuggestion suggestion) { - return new SearchSuggestion(suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); + public static SearchSuggestion mapSearchSuggestion( + com.linkedin.metadata.search.SearchSuggestion suggestion) { + return new SearchSuggestion( + 
suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); + } + + public static EntityPath mapPath(@Nullable final QueryContext context, UrnArray path) { + EntityPath entityPath = new EntityPath(); + entityPath.setPath( + path.stream().map(p -> UrnToEntityMapper.map(context, p)).collect(Collectors.toList())); + return entityPath; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java index 08afbd510b98f3..8df26365c45aad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java @@ -1,9 +1,10 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Simple interface for classes capable of mapping an input of type I to - * an output of type O. - */ +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +/** Simple interface for classes capable of mapping an input of type I to an output of type O. 
*/ public interface ModelMapper { - O apply(final I input); + O apply(@Nullable final QueryContext context, @Nonnull final I input); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java index 903e9625247346..e0ac0336c8715d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java @@ -3,7 +3,5 @@ import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; import com.linkedin.metadata.aspect.EnvelopedAspect; - -public interface TimeSeriesAspectMapper extends ModelMapper { - -} +public interface TimeSeriesAspectMapper + extends ModelMapper {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java index dd00727fc2845a..88214ac999a7b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; @@ -11,18 
+15,16 @@ import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - +import javax.annotation.Nullable; public class UrnScrollAcrossLineageResultsMapper { public static ScrollAcrossLineageResults map( - LineageScrollResult searchResult) { - return new UrnScrollAcrossLineageResultsMapper().apply(searchResult); + @Nullable final QueryContext context, LineageScrollResult searchResult) { + return new UrnScrollAcrossLineageResultsMapper().apply(context, searchResult); } - public ScrollAcrossLineageResults apply(LineageScrollResult input) { + public ScrollAcrossLineageResults apply( + @Nullable final QueryContext context, LineageScrollResult input) { final ScrollAcrossLineageResults result = new ScrollAcrossLineageResults(); result.setNextScrollId(input.getScrollId()); @@ -30,25 +32,34 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(r -> mapResult(context, r)).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(f -> mapFacet(context, f)) + .collect(Collectors.toList())); return result; } - private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { + private SearchAcrossLineageResult mapResult( + @Nullable final QueryContext context, LineageSearchEntity searchEntity) { return SearchAcrossLineageResult.builder() - 
.setEntity(UrnToEntityMapper.map(searchEntity.getEntity())) + .setEntity(UrnToEntityMapper.map(context, searchEntity.getEntity())) .setInsights(getInsightsFromFeatures(searchEntity.getFeatures())) - .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) - .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) + .setMatchedFields(getMatchedFieldEntry(context, searchEntity.getMatchedFields())) + .setPaths( + searchEntity.getPaths().stream() + .map(p -> mapPath(context, p)) + .collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) .build(); } - private EntityPath mapPath(UrnArray path) { + private EntityPath mapPath(@Nullable final QueryContext context, UrnArray path) { EntityPath entityPath = new EntityPath(); - entityPath.setPath(path.stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + entityPath.setPath( + path.stream().map(p -> UrnToEntityMapper.map(context, p)).collect(Collectors.toList())); return entityPath; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java index fd774d73f3df78..10d17bf1756e7f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java @@ -1,19 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ScrollResults; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - +import javax.annotation.Nullable; public class UrnScrollResultsMapper { public static ScrollResults map( + 
@Nullable final QueryContext context, com.linkedin.metadata.search.ScrollResult scrollResult) { - return new UrnScrollResultsMapper().apply(scrollResult); + return new UrnScrollResultsMapper().apply(context, scrollResult); } - public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { + public ScrollResults apply( + @Nullable final QueryContext context, com.linkedin.metadata.search.ScrollResult input) { final ScrollResults result = new ScrollResults(); if (!input.hasScrollId() && (!input.hasPageSize() || !input.hasNumEntities())) { @@ -25,8 +28,14 @@ public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream() + .map(r -> MapperUtils.mapResult(context, r)) + .collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(f -> MapperUtils.mapFacet(context, f)) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index ae87d0269c188a..d0c5605976d633 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -1,30 +1,31 @@ package com.linkedin.datahub.graphql.types.mappers; -import com.linkedin.common.UrnArray; +import static 
com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.FreshnessStats; -import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; +import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import java.util.ArrayList; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - +import javax.annotation.Nullable; public class UrnSearchAcrossLineageResultsMapper { public static SearchAcrossLineageResults map( - LineageSearchResult searchResult) { - return new UrnSearchAcrossLineageResultsMapper().apply(searchResult); + @Nullable final QueryContext context, LineageSearchResult searchResult) { + return new UrnSearchAcrossLineageResultsMapper().apply(context, searchResult); } - public SearchAcrossLineageResults apply(LineageSearchResult input) { + public SearchAcrossLineageResults apply( + @Nullable final QueryContext context, LineageSearchResult input) { final SearchAcrossLineageResults result = new SearchAcrossLineageResults(); result.setStart(input.getFrom()); @@ -32,35 +33,45 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata 
searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(r -> mapResult(context, r)).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(f -> MapperUtils.mapFacet(context, f)) + .collect(Collectors.toList())); if (input.hasFreshness()) { FreshnessStats outputFreshness = new FreshnessStats(); outputFreshness.setCached(input.getFreshness().isCached()); - outputFreshness.setSystemFreshness(input.getFreshness().getSystemFreshness().entrySet().stream().map(x -> - SystemFreshness.builder() - .setSystemName(x.getKey()) - .setFreshnessMillis(x.getValue()) - .build()).collect(Collectors.toList())); + outputFreshness.setSystemFreshness( + input.getFreshness().getSystemFreshness().entrySet().stream() + .map( + x -> + SystemFreshness.builder() + .setSystemName(x.getKey()) + .setFreshnessMillis(x.getValue()) + .build()) + .collect(Collectors.toList())); result.setFreshness(outputFreshness); } return result; } - private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { + private SearchAcrossLineageResult mapResult( + @Nullable final QueryContext context, LineageSearchEntity searchEntity) { return SearchAcrossLineageResult.builder() - .setEntity(UrnToEntityMapper.map(searchEntity.getEntity())) + .setEntity(UrnToEntityMapper.map(context, searchEntity.getEntity())) .setInsights(getInsightsFromFeatures(searchEntity.getFeatures())) - .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) - .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) + .setMatchedFields(getMatchedFieldEntry(context, searchEntity.getMatchedFields())) + .setPaths( + searchEntity.getPaths().stream() + 
.map(p -> mapPath(context, p)) + .collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) + .setDegrees(new ArrayList<>(searchEntity.getDegrees())) + .setExplored(Boolean.TRUE.equals(searchEntity.isExplored())) + .setIgnoredAsHop(Boolean.TRUE.equals(searchEntity.isIgnoredAsHop())) + .setTruncatedChildren(Boolean.TRUE.equals(searchEntity.isTruncatedChildren())) .build(); } - - private EntityPath mapPath(UrnArray path) { - EntityPath entityPath = new EntityPath(); - entityPath.setPath(path.stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); - return entityPath; - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java index b16e2f10d1df7a..c7c50c8f40c15c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java @@ -1,19 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - +import javax.annotation.Nullable; public class UrnSearchResultsMapper { public static SearchResults map( + @Nullable final QueryContext context, com.linkedin.metadata.search.SearchResult searchResult) { - return new UrnSearchResultsMapper().apply(searchResult); + return new UrnSearchResultsMapper().apply(context, searchResult); } - public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { + public SearchResults apply( + @Nullable final QueryContext context, 
com.linkedin.metadata.search.SearchResult input) { final SearchResults result = new SearchResults(); if (!input.hasFrom() || !input.hasPageSize() || !input.hasNumEntities()) { @@ -25,9 +28,18 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); - result.setSuggestions(searchResultMetadata.getSuggestions().stream().map(MapperUtils::mapSearchSuggestion).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream() + .map(r -> MapperUtils.mapResult(context, r)) + .collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(f -> MapperUtils.mapFacet(context, f)) + .collect(Collectors.toList())); + result.setSuggestions( + searchResultMetadata.getSuggestions().stream() + .map(MapperUtils::mapSearchSuggestion) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index f5594afc1a5b5d..dd7792e328386a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; 
@@ -26,7 +29,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,103 +40,126 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLFeatureTableType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of("platform", "name"); - private final EntityClient _entityClient; - - public MLFeatureTableType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLFEATURE_TABLE; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return MLFeatureTable.class; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) throws Exception { - final List mlFeatureTableUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map mlFeatureTableMap = _entityClient.batchGetV2(ML_FEATURE_TABLE_ENTITY_NAME, - new HashSet<>(mlFeatureTableUrns), null, context.getAuthentication()); - - final List gmsResults = mlFeatureTableUrns.stream() - .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlFeatureTable -> gmsMlFeatureTable == null ? 
null - : DataFetcherResult.newResult() - .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLFeatureTables", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlFeatureTable", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlFeatureTable", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlFeatureTable", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLFeatureTableType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of("platform", "name"); + private final EntityClient _entityClient; + + public MLFeatureTableType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLFEATURE_TABLE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return MLFeatureTable.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) throws Exception { + final List mlFeatureTableUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map mlFeatureTableMap = + _entityClient.batchGetV2( + context.getOperationContext(), + ML_FEATURE_TABLE_ENTITY_NAME, + new HashSet<>(mlFeatureTableUrns), + null); + + final List gmsResults = + mlFeatureTableUrns.stream() + .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlFeatureTable -> + gmsMlFeatureTable == null + ? 
null + : DataFetcherResult.newResult() + .data(MLFeatureTableMapper.map(context, gmsMlFeatureTable)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLFeatureTables", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "mlFeatureTable", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "mlFeatureTable", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlFeatureTable", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(context.getOperationContext(), MLModelUtils.getUrn(urn)); + return BrowsePathsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java index f5e0d80948bcc1..c6ef1c68997524 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,7 +21,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +32,94 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLFeatureType implements SearchableEntityType { - private static final Set FACET_FIELDS = ImmutableSet.of(""); - private final EntityClient _entityClient; + private static final Set 
FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLFeatureType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLFeatureType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLFEATURE; - } + @Override + public EntityType type() { + return EntityType.MLFEATURE; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return MLFeature.class; - } + @Override + public Class objectClass() { + return MLFeature.class; + } - @Override - public List> batchLoad(final List urns, @Nonnull final QueryContext context) - throws Exception { - final List mlFeatureUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlFeatureUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map mlFeatureMap = _entityClient.batchGetV2(ML_FEATURE_ENTITY_NAME, - new HashSet<>(mlFeatureUrns), null, context.getAuthentication()); + try { + final Map mlFeatureMap = + _entityClient.batchGetV2( + context.getOperationContext(), + ML_FEATURE_ENTITY_NAME, + new HashSet<>(mlFeatureUrns), + null); - final List gmsResults = mlFeatureUrns.stream() - .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null)) - .collect(Collectors.toList()); + final List gmsResults = + mlFeatureUrns.stream() + .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null)) + .collect(Collectors.toList()); - return gmsResults.stream() - .map(gmsMlFeature -> gmsMlFeature == null ? 
null - : DataFetcherResult.newResult() - .data(MLFeatureMapper.map(gmsMlFeature)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLFeatures", e); - } + return gmsResults.stream() + .map( + gmsMlFeature -> + gmsMlFeature == null + ? null + : DataFetcherResult.newResult() + .data(MLFeatureMapper.map(context, gmsMlFeature)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLFeatures", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlFeature", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "mlFeature", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + 
@Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "mlFeature", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java index 05b70c15bafc6d..7cc96ff47cc083 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,7 +29,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,104 +40,127 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLModelGroupType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private final EntityClient _entityClient; - - public MLModelGroupType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public 
EntityType type() { - return EntityType.MLMODEL_GROUP; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return MLModelGroup.class; - } - - @Override - public List> batchLoad(final List urns, @Nonnull final QueryContext context) - throws Exception { - final List mlModelGroupUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map mlModelMap = _entityClient.batchGetV2(ML_MODEL_GROUP_ENTITY_NAME, - new HashSet<>(mlModelGroupUrns), null, context.getAuthentication()); - - final List gmsResults = mlModelGroupUrns.stream() - .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlModelGroup -> gmsMlModelGroup == null ? null - : DataFetcherResult.newResult() - .data(MLModelGroupMapper.map(gmsMlModelGroup)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLModelGroups", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlModelGroup", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlModelGroup", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List 
path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlModelGroup", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLModelGroupType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private final EntityClient _entityClient; + + public MLModelGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLMODEL_GROUP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return MLModelGroup.class; + } + + @Override + public List> batchLoad( + final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlModelGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map mlModelMap = + _entityClient.batchGetV2( + context.getOperationContext(), + ML_MODEL_GROUP_ENTITY_NAME, + new HashSet<>(mlModelGroupUrns), + null); + + final List gmsResults = + mlModelGroupUrns.stream() + .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlModelGroup -> + gmsMlModelGroup == null + ? 
null + : DataFetcherResult.newResult() + .data(MLModelGroupMapper.map(context, gmsMlModelGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLModelGroups", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "mlModelGroup", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "mlModelGroup", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlModelGroup", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + context.getOperationContext(), MLModelUtils.getMLModelGroupUrn(urn)); + return BrowsePathsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java index ef4be247a246b6..6c79e6b2183d24 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,7 +29,6 @@ import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,102 +40,124 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLModelType implements SearchableEntityType, 
BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private final EntityClient _entityClient; - - public MLModelType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLMODEL; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return MLModel.class; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) throws Exception { - final List mlModelUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map mlModelMap = _entityClient.batchGetV2(ML_MODEL_ENTITY_NAME, - new HashSet<>(mlModelUrns), null, context.getAuthentication()); - - final List gmsResults = mlModelUrns.stream() - .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlModel -> gmsMlModel == null ? 
null - : DataFetcherResult.newResult() - .data(MLModelMapper.map(gmsMlModel)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLModels", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlModel", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlModel", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLModelType + implements SearchableEntityType, BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private final EntityClient _entityClient; + + public MLModelType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLMODEL; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return MLModel.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) throws Exception { + final List mlModelUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map mlModelMap = + _entityClient.batchGetV2( + context.getOperationContext(), + ML_MODEL_ENTITY_NAME, + new HashSet<>(mlModelUrns), + null); + + final List gmsResults = + mlModelUrns.stream() + .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlModel -> + gmsMlModel == null + ? 
null + : DataFetcherResult.newResult() + .data(MLModelMapper.map(context, gmsMlModel)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLModels", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "mlModel", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(context.getOperationContext(), "mlModel", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + "mlModel", + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + context.getOperationContext(), MLModelUtils.getMLModelUrn(urn)); + return BrowsePathsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java index ff51bab6c114e3..ccecb0ae6406fb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java @@ -1,44 +1,47 @@ package com.linkedin.datahub.graphql.types.mlmodel; -import java.net.URISyntaxException; - import com.linkedin.common.urn.MLFeatureUrn; import com.linkedin.common.urn.MLModelUrn; import com.linkedin.common.urn.Urn; +import java.net.URISyntaxException; public class MLModelUtils { - private MLModelUtils() { } + private MLModelUtils() {} - static MLModelUrn getMLModelUrn(String modelUrn) { - try { - return MLModelUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn)); - } + static MLModelUrn getMLModelUrn(String modelUrn) { + try { + return MLModelUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn)); } - - static Urn 
getMLModelGroupUrn(String modelUrn) { - try { - return Urn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getMLModelGroupUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); } - - static MLFeatureUrn getMLFeatureUrn(String modelUrn) { - try { - return MLFeatureUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); - } + } + + static MLFeatureUrn getMLFeatureUrn(String modelUrn) { + try { + return MLFeatureUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); } - - static Urn getUrn(String modelUrn) { - try { - return Urn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index a6963e6b20abdb..d2563bd98ba6ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,7 +21,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +32,95 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLPrimaryKeyType implements SearchableEntityType { - private static final Set FACET_FIELDS = ImmutableSet.of(""); - private final EntityClient _entityClient; + private static final Set FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLPrimaryKeyType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLPrimaryKeyType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLPRIMARY_KEY; - } + @Override + public EntityType type() { + return EntityType.MLPRIMARY_KEY; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return MLPrimaryKey.class; - } + @Override + public Class objectClass() { + return MLPrimaryKey.class; + } - @Override - public List> batchLoad(final List urns, @Nonnull final QueryContext context) - throws Exception { - final List mlPrimaryKeyUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + 
public List> batchLoad( + final List urns, @Nonnull final QueryContext context) throws Exception { - try { - final Map mlPrimaryKeyMap = _entityClient.batchGetV2(ML_PRIMARY_KEY_ENTITY_NAME, - new HashSet<>(mlPrimaryKeyUrns), null, context.getAuthentication()); + final List mlPrimaryKeyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - final List gmsResults = mlPrimaryKeyUrns.stream() - .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) - .collect(Collectors.toList()); + try { + final Map mlPrimaryKeyMap = + _entityClient.batchGetV2( + context.getOperationContext(), + ML_PRIMARY_KEY_ENTITY_NAME, + new HashSet<>(mlPrimaryKeyUrns), + null); - return gmsResults.stream() - .map(gmsMlPrimaryKey -> gmsMlPrimaryKey == null ? null - : DataFetcherResult.newResult() - .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); - } - } + final List gmsResults = + mlPrimaryKeyUrns.stream() + .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) + .collect(Collectors.toList()); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlPrimaryKey", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + return gmsResults.stream() + .map( + gmsMlPrimaryKey -> + gmsMlPrimaryKey == null + ? 
null + : DataFetcherResult.newResult() + .data(MLPrimaryKeyMapper.map(context, gmsMlPrimaryKey)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlPrimaryKey", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "mlPrimaryKey", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), "mlPrimaryKey", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java index c82909d49acbf5..6485313b030cba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java @@ -1,23 +1,26 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BaseData; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; public class BaseDataMapper implements ModelMapper { - public static final BaseDataMapper INSTANCE = new BaseDataMapper(); + public static final BaseDataMapper INSTANCE = new BaseDataMapper(); - public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { - return INSTANCE.apply(input); - } + public static BaseData map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.BaseData input) { + return INSTANCE.apply(context, input); + } - @Override - public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { - final BaseData result = new BaseData(); - result.setDataset(input.getDataset().toString()); - result.setMotivation(input.getMotivation()); - result.setPreProcessing(input.getPreProcessing()); - return result; - } + @Override + public BaseData apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.BaseData input) { + final BaseData result = new BaseData(); + result.setDataset(input.getDataset().toString()); + result.setMotivation(input.getMotivation()); + result.setPreProcessing(input.getPreProcessing()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java index c19cb7bae2aff6..b3b642ec9f1261 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java @@ -1,30 +1,39 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; -public class CaveatsAndRecommendationsMapper implements ModelMapper { +public class CaveatsAndRecommendationsMapper + implements ModelMapper< + com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> { - public static final CaveatsAndRecommendationsMapper INSTANCE = new CaveatsAndRecommendationsMapper(); + public static final CaveatsAndRecommendationsMapper INSTANCE = + new CaveatsAndRecommendationsMapper(); - public static CaveatsAndRecommendations map(@NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - return INSTANCE.apply(caveatsAndRecommendations); - } + public static CaveatsAndRecommendations map( + @Nullable QueryContext context, + @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { + return INSTANCE.apply(context, caveatsAndRecommendations); + } - @Override - public CaveatsAndRecommendations apply(com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); - if (caveatsAndRecommendations.getCaveats() != null) { - result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); - } - if (caveatsAndRecommendations.getRecommendations() != null) { - result.setRecommendations(caveatsAndRecommendations.getRecommendations()); - } - if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) { - result.setIdealDatasetCharacteristics(caveatsAndRecommendations.getIdealDatasetCharacteristics()); - } - 
return result; + @Override + public CaveatsAndRecommendations apply( + @Nullable QueryContext context, + com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { + final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); + if (caveatsAndRecommendations.getCaveats() != null) { + result.setCaveats(CaveatsDetailsMapper.map(context, caveatsAndRecommendations.getCaveats())); + } + if (caveatsAndRecommendations.getRecommendations() != null) { + result.setRecommendations(caveatsAndRecommendations.getRecommendations()); + } + if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) { + result.setIdealDatasetCharacteristics( + caveatsAndRecommendations.getIdealDatasetCharacteristics()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java index 22617a8bc03e7f..9b89e955205461 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java @@ -1,25 +1,29 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CaveatDetails; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; -public class CaveatsDetailsMapper implements ModelMapper { +public class CaveatsDetailsMapper + implements ModelMapper { - public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); + public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); - public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - return 
INSTANCE.apply(input); - } + public static CaveatDetails map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + return INSTANCE.apply(context, input); + } - @Override - public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - final CaveatDetails result = new CaveatDetails(); + @Override + public CaveatDetails apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + final CaveatDetails result = new CaveatDetails(); - result.setCaveatDescription(input.getCaveatDescription()); - result.setGroupsNotRepresented(input.getGroupsNotRepresented()); - result.setNeedsFurtherTesting(input.isNeedsFurtherTesting()); - return result; - } + result.setCaveatDescription(input.getCaveatDescription()); + result.setGroupsNotRepresented(input.getGroupsNotRepresented()); + result.setNeedsFurtherTesting(input.isNeedsFurtherTesting()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java index 1d967619d43cbc..4d0983177fb746 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java @@ -1,26 +1,32 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EthicalConsiderations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; -public class EthicalConsiderationsMapper implements ModelMapper { +public class EthicalConsiderationsMapper + implements ModelMapper { - public static 
final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); + public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); - public static EthicalConsiderations map(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - return INSTANCE.apply(ethicalConsiderations); - } + public static EthicalConsiderations map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { + return INSTANCE.apply(context, ethicalConsiderations); + } - @Override - public EthicalConsiderations apply(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - final EthicalConsiderations result = new EthicalConsiderations(); - result.setData(ethicalConsiderations.getData()); - result.setHumanLife(ethicalConsiderations.getHumanLife()); - result.setMitigations(ethicalConsiderations.getMitigations()); - result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms()); - result.setUseCases(ethicalConsiderations.getUseCases()); - return result; - } + @Override + public EthicalConsiderations apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { + final EthicalConsiderations result = new EthicalConsiderations(); + result.setData(ethicalConsiderations.getData()); + result.setHumanLife(ethicalConsiderations.getHumanLife()); + result.setMitigations(ethicalConsiderations.getMitigations()); + result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms()); + result.setUseCases(ethicalConsiderations.getUseCases()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java index 73aa8db362a545..442ce052c1c8f1 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java @@ -1,28 +1,32 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.HyperParameterMap; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.ml.metadata.HyperParameterValueTypeMap; - +import javax.annotation.Nullable; import lombok.NonNull; -public class HyperParameterMapMapper implements ModelMapper { - - public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); +public class HyperParameterMapMapper + implements ModelMapper { - public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { - return INSTANCE.apply(input); - } + public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); - @Override - public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { - final HyperParameterMap result = new HyperParameterMap(); + public static HyperParameterMap map( + @Nullable QueryContext context, @NonNull final HyperParameterValueTypeMap input) { + return INSTANCE.apply(context, input); + } - for (String key: input.keySet()) { - result.setKey(key); - result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); - } + @Override + public HyperParameterMap apply( + @Nullable QueryContext context, @NonNull final HyperParameterValueTypeMap input) { + final HyperParameterMap result = new HyperParameterMap(); - return result; + for (String key : input.keySet()) { + result.setKey(key); + result.setValue(HyperParameterValueTypeMapper.map(context, input.get(key))); } + + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java index 6509b0e6cfa84f..8b5bc445a36092 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java @@ -1,39 +1,46 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BooleanBox; import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.HyperParameterValueType; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; -public class HyperParameterValueTypeMapper implements ModelMapper { +public class HyperParameterValueTypeMapper + implements ModelMapper< + com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> { - public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); + public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); - public static HyperParameterValueType map(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - return INSTANCE.apply(input); - } + public static HyperParameterValueType map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { + return INSTANCE.apply(context, input); + } - @Override - public HyperParameterValueType apply(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - 
HyperParameterValueType result = null; + @Override + public HyperParameterValueType apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { + HyperParameterValueType result = null; - if (input.isString()) { - result = new StringBox(input.getString()); - } else if (input.isBoolean()) { - result = new BooleanBox(input.getBoolean()); - } else if (input.isInt()) { - result = new IntBox(input.getInt()); - } else if (input.isDouble()) { - result = new FloatBox(input.getDouble()); - } else if (input.isFloat()) { - result = new FloatBox(new Double(input.getFloat())); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); - } - return result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else if (input.isBoolean()) { + result = new BooleanBox(input.getBoolean()); + } else if (input.isInt()) { + result = new IntBox(input.getInt()); + } else if (input.isDouble()) { + result = new FloatBox(input.getDouble()); + } else if (input.isFloat()) { + result = new FloatBox(Double.valueOf(input.getFloat())); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java index 47598bc2a3e4c3..6a7d1aae7679b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java @@ -1,29 +1,37 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.stream.Collectors; - +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.IntendedUse; import com.linkedin.datahub.graphql.generated.IntendedUserType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; -public class IntendedUseMapper implements ModelMapper { +public class IntendedUseMapper + implements ModelMapper { - public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); + public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); - public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - return INSTANCE.apply(intendedUse); - } + public static IntendedUse map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + return INSTANCE.apply(context, intendedUse); + } - @Override - public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - final IntendedUse result = new IntendedUse(); - result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); - result.setPrimaryUses(intendedUse.getPrimaryUses()); - if (intendedUse.getPrimaryUsers() != null) { - result.setPrimaryUsers(intendedUse.getPrimaryUsers().stream().map(v -> IntendedUserType.valueOf(v.toString())).collect(Collectors.toList())); - } - return result; + @Override + public IntendedUse apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + final IntendedUse result = new IntendedUse(); + result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); + result.setPrimaryUses(intendedUse.getPrimaryUses()); + if (intendedUse.getPrimaryUsers() != null) { + result.setPrimaryUsers( + intendedUse.getPrimaryUsers().stream() + .map(v -> IntendedUserType.valueOf(v.toString())) + .collect(Collectors.toList())); } + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java index 010ae477251f39..a4f3aa7a0e2261 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - - +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -13,6 +15,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; @@ -26,8 +30,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import 
com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -35,93 +41,133 @@ import com.linkedin.metadata.key.MLFeatureKey; import com.linkedin.ml.metadata.EditableMLFeatureProperties; import com.linkedin.ml.metadata.MLFeatureProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLFeatureMapper implements ModelMapper { - public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); - - public static MLFeature map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public MLFeature apply(@Nonnull final EntityResponse entityResponse) { - final MLFeature result = new MLFeature(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); 
- mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> - entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - - return mappingHelper.getResult(); + public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); + + public static MLFeature map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public MLFeature apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final MLFeature result = new MLFeature(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + 
mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, MLFeatureMapper::mapMLFeatureKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, ML_FEATURE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapMLFeatureProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLFeatureMapper::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> + entity.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((mlFeature, dataMap) -> + mlFeature.setStructuredProperties( + StructuredPropertiesMapper.map(context, 
new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLFeature.class); + } else { + return mappingHelper.getResult(); } - - private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); - mlFeature.setName(mlFeatureKey.getName()); - mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); - } - - private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setDescription(featureProperties.getDescription()); - if (featureProperties.getDataType() != null) { - mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); - } + } + + private static void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); + mlFeature.setName(mlFeatureKey.getName()); + mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); + } + + private static void mapMLFeatureProperties( + @Nullable final QueryContext context, + @Nonnull MLFeature mlFeature, + @Nonnull DataMap dataMap) { + MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); + mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); + mlFeature.setProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); + mlFeature.setDescription(featureProperties.getDescription()); + if 
(featureProperties.getDataType() != null) { + mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); } - - private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeature entity, DataMap dataMap) { - EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); - MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private static void mapGlobalTags( + @Nullable final QueryContext context, MLFeature entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(context, globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull MLFeature entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); + } + + private static void mapEditableProperties(MLFeature entity, DataMap dataMap) { + EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); + MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 9d647a38d2153a..92d090275867da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,44 +1,50 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; -import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.NonNull; -public class MLFeaturePropertiesMapper implements ModelMapper { +public class MLFeaturePropertiesMapper + implements ModelMapper { - public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); + public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); - public static MLFeatureProperties map(@NonNull final 
com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(mlFeatureProperties); - } + public static MLFeatureProperties map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + return INSTANCE.apply(context, mlFeatureProperties); + } - @Override - public MLFeatureProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - final MLFeatureProperties result = new MLFeatureProperties(); + @Override + public MLFeatureProperties apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + final MLFeatureProperties result = new MLFeatureProperties(); - result.setDescription(mlFeatureProperties.getDescription()); - if (mlFeatureProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); - } - if (mlFeatureProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); - } - if (mlFeatureProperties.getSources() != null) { - result.setSources(mlFeatureProperties - .getSources() - .stream() - .map(urn -> { + result.setDescription(mlFeatureProperties.getDescription()); + if (mlFeatureProperties.getDataType() != null) { + result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); + } + if (mlFeatureProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(context, mlFeatureProperties.getVersion())); + } + if (mlFeatureProperties.getSources() != null) { + result.setSources( + mlFeatureProperties.getSources().stream() + .map( + urn -> { final Dataset dataset = new Dataset(); dataset.setUrn(urn.toString()); return dataset; - }) - .collect(Collectors.toList())); - } - - return result; + }) + .collect(Collectors.toList())); } + + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index 3ba9a76c4bdde1..30bf4dda1cf4fd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -1,9 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -12,6 +15,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeatureTable; @@ -25,8 +30,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import 
com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -34,92 +41,140 @@ import com.linkedin.metadata.key.MLFeatureTableKey; import com.linkedin.ml.metadata.EditableMLFeatureTableProperties; import com.linkedin.ml.metadata.MLFeatureTableProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLFeatureTableMapper implements ModelMapper { - public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); - - public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { - final MLFeatureTable result = new MLFeatureTable(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE_TABLE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); - mappingHelper.mapToResult(ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> 
this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> - entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - - return mappingHelper.getResult(); - } - - private void mapMLFeatureTableKey(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { - MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); - mlFeatureTable.setName(mlFeatureTableKey.getName()); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); - mlFeatureTable.setPlatform(partialPlatform); - } - - private void mapMLFeatureTableProperties(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { - 
MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); - mlFeatureTable.setFeatureTableProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setDescription(featureTableProperties.getDescription()); + public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); + + public static MLFeatureTable map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public MLFeatureTable apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final MLFeatureTable result = new MLFeatureTable(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE_TABLE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + mlFeatureTable.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLFeatureTableProperties(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + mlFeatureTable.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + 
mlFeatureTable.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + mlFeatureTable.setDeprecation( + DeprecationMapper.map(context, new Deprecation(dataMap)))); + + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLFeatureTableMapper::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> + entity.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((mlFeatureTable, dataMap) -> + mlFeatureTable.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLFeatureTable.class); + } else { + return mappingHelper.getResult(); } - - private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = 
GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { - EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); - MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapMLFeatureTableKey( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { + MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); + mlFeatureTable.setName(mlFeatureTableKey.getName()); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); + mlFeatureTable.setPlatform(partialPlatform); + } + + private static void mapMLFeatureTableProperties( + @Nullable final QueryContext context, + @Nonnull MLFeatureTable mlFeatureTable, + @Nonnull DataMap dataMap, + Urn entityUrn) { + MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); + mlFeatureTable.setFeatureTableProperties( + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); + mlFeatureTable.setProperties( + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); + mlFeatureTable.setDescription(featureTableProperties.getDescription()); + } + + private static void mapGlobalTags( + @Nullable final QueryContext context, MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + 
com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(context, globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLFeatureTable entity, + @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { + EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); + MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index 13e3c795997250..d9fed13ed0d0be 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -1,50 +1,67 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import 
com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.NonNull; public class MLFeatureTablePropertiesMapper { - public static final MLFeatureTablePropertiesMapper INSTANCE = new MLFeatureTablePropertiesMapper(); + public static final MLFeatureTablePropertiesMapper INSTANCE = + new MLFeatureTablePropertiesMapper(); + + public static MLFeatureTableProperties map( + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + return INSTANCE.apply(context, mlFeatureTableProperties, entityUrn); + } - public static MLFeatureTableProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + public static MLFeatureTableProperties apply( + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + final MLFeatureTableProperties result = new MLFeatureTableProperties(); + + result.setDescription(mlFeatureTableProperties.getDescription()); + if (mlFeatureTableProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlFeatureTableProperties.getMlFeatures().stream() + .filter(f -> context == null || canView(context.getOperationContext(), f)) + .map( + urn -> { + final MLFeature mlFeature = new MLFeature(); + mlFeature.setUrn(urn.toString()); + return mlFeature; + }) + .collect(Collectors.toList())); } - public MLFeatureTableProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - final MLFeatureTableProperties result = new MLFeatureTableProperties(); - - result.setDescription(mlFeatureTableProperties.getDescription()); - if (mlFeatureTableProperties.getMlFeatures() != 
null) { - result.setMlFeatures(mlFeatureTableProperties.getMlFeatures().stream().map(urn -> { - final MLFeature mlFeature = new MLFeature(); - mlFeature.setUrn(urn.toString()); - return mlFeature; - }).collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { - result.setMlPrimaryKeys(mlFeatureTableProperties - .getMlPrimaryKeys() - .stream() - .map(urn -> { + if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { + result.setMlPrimaryKeys( + mlFeatureTableProperties.getMlPrimaryKeys().stream() + .filter(k -> context == null || canView(context.getOperationContext(), k)) + .map( + urn -> { final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey(); mlPrimaryKey.setUrn(urn.toString()); return mlPrimaryKey; - }) - .collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); - } + }) + .collect(Collectors.toList())); + } - return result; + if (mlFeatureTableProperties.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java index 5cc242d0b19f2f..37989b3bda8273 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java @@ -1,26 +1,30 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLHyperParam; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; 
- +import javax.annotation.Nullable; import lombok.NonNull; -public class MLHyperParamMapper implements ModelMapper { +public class MLHyperParamMapper + implements ModelMapper { - public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); + public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); - public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - return INSTANCE.apply(input); - } + public static MLHyperParam map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + return INSTANCE.apply(context, input); + } - @Override - public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - final MLHyperParam result = new MLHyperParam(); + @Override + public MLHyperParam apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + final MLHyperParam result = new MLHyperParam(); - result.setDescription(input.getDescription()); - result.setValue(input.getValue()); - result.setCreatedAt(input.getCreatedAt()); - result.setName(input.getName()); - return result; - } + result.setDescription(input.getDescription()); + result.setValue(input.getValue()); + result.setCreatedAt(input.getCreatedAt()); + result.setName(input.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java index 2545bd5f8a848c..80ebabec283bb3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java @@ -1,25 +1,28 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.MLMetric; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; - public class MLMetricMapper implements ModelMapper { - public static final MLMetricMapper INSTANCE = new MLMetricMapper(); + public static final MLMetricMapper INSTANCE = new MLMetricMapper(); - public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - return INSTANCE.apply(metric); - } + public static MLMetric map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLMetric metric) { + return INSTANCE.apply(context, metric); + } - @Override - public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - final MLMetric result = new MLMetric(); - result.setDescription(metric.getDescription()); - result.setValue(metric.getValue()); - result.setCreatedAt(metric.getCreatedAt()); - result.setName(metric.getName()); - return result; - } + @Override + public MLMetric apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLMetric metric) { + final MLMetric result = new MLMetric(); + result.setDescription(metric.getDescription()); + result.setValue(metric.getValue()); + result.setCreatedAt(metric.getCreatedAt()); + result.setName(metric.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java index 0d32f7275e5fec..4316251a464f28 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java @@ -1,29 +1,40 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import 
java.util.stream.Collectors; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; -public class MLModelFactorPromptsMapper implements ModelMapper { +public class MLModelFactorPromptsMapper + implements ModelMapper { - public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); + public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); - public static MLModelFactorPrompts map(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - return INSTANCE.apply(input); - } + public static MLModelFactorPrompts map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + return INSTANCE.apply(context, input); + } - @Override - public MLModelFactorPrompts apply(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); - if (input.getEvaluationFactors() != null) { - mlModelFactorPrompts.setEvaluationFactors(input.getEvaluationFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - if (input.getRelevantFactors() != null) { - mlModelFactorPrompts.setRelevantFactors(input.getRelevantFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - return mlModelFactorPrompts; + @Override + public MLModelFactorPrompts apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); + if (input.getEvaluationFactors() != null) { + mlModelFactorPrompts.setEvaluationFactors( + input.getEvaluationFactors().stream() + .map(f -> MLModelFactorsMapper.map(context, f)) 
+ .collect(Collectors.toList())); + } + if (input.getRelevantFactors() != null) { + mlModelFactorPrompts.setRelevantFactors( + input.getRelevantFactors().stream() + .map(f -> MLModelFactorsMapper.map(context, f)) + .collect(Collectors.toList())); } + return mlModelFactorPrompts; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java index aa4737dfd229c8..5607ef8c2cf130 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java @@ -1,32 +1,37 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.ArrayList; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelFactors; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.ArrayList; +import javax.annotation.Nullable; import lombok.NonNull; -public class MLModelFactorsMapper implements ModelMapper { +public class MLModelFactorsMapper + implements ModelMapper { - public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); + public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); - public static MLModelFactors map(@NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { - return INSTANCE.apply(modelFactors); - } + public static MLModelFactors map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { + return INSTANCE.apply(context, modelFactors); + } - @Override - public MLModelFactors apply(@NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { - final MLModelFactors result = new MLModelFactors(); - if 
(mlModelFactors.getEnvironment() != null) { - result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); - } - if (mlModelFactors.getGroups() != null) { - result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); - } - if (mlModelFactors.getInstrumentation() != null) { - result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); - } - return result; + @Override + public MLModelFactors apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { + final MLModelFactors result = new MLModelFactors(); + if (mlModelFactors.getEnvironment() != null) { + result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); + } + if (mlModelFactors.getGroups() != null) { + result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); + } + if (mlModelFactors.getInstrumentation() != null) { + result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index 311ee121bcaf98..7e99040e44c82e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -1,8 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import 
com.linkedin.common.Ownership; @@ -10,6 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; @@ -23,8 +29,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -32,92 +40,132 @@ import com.linkedin.metadata.key.MLModelGroupKey; import com.linkedin.ml.metadata.EditableMLModelGroupProperties; import com.linkedin.ml.metadata.MLModelGroupProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLModelGroupMapper implements ModelMapper { - public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); - - public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { - final MLModelGroup result = new MLModelGroup(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); - mappingHelper.mapToResult(ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, 
dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - - return mappingHelper.getResult(); - } - - private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); - mlModelGroup.setName(mlModelGroupKey.getName()); - mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); - mlModelGroup.setPlatform(partialPlatform); - } - - private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); - if (modelGroupProperties.getDescription() != null) { - mlModelGroup.setDescription(modelGroupProperties.getDescription()); - } + public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); + + public static MLModelGroup map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public MLModelGroup apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final MLModelGroup result = new MLModelGroup(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new 
MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModelGroup, dataMap) -> + mlModelGroup.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + ML_MODEL_GROUP_KEY_ASPECT_NAME, MLModelGroupMapper::mapToMLModelGroupKey); + mappingHelper.mapToResult( + context, + ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, + MLModelGroupMapper::mapToMLModelGroupProperties); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModelGroup, dataMap) -> + mlModelGroup.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModelGroup, dataMap) -> + mlModelGroup.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> MLModelGroupMapper.mapGlobalTags(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLModelGroupMapper::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, MLModelGroupMapper::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModelGroup, dataMap) -> + mlModelGroup.setBrowsePathV2( + BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((mlModelGroup, dataMap) -> + mlModelGroup.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + 
FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModelGroup.class); + } else { + return mappingHelper.getResult(); } - - private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); + } + + private static void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); + mlModelGroup.setName(mlModelGroupKey.getName()); + mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); + mlModelGroup.setPlatform(partialPlatform); + } + + private static void mapToMLModelGroupProperties( + @Nullable final QueryContext context, MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); + mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(context, modelGroupProperties)); + if (modelGroupProperties.getDescription() != null) { + mlModelGroup.setDescription(modelGroupProperties.getDescription()); } - - private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { - EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); - MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private static void mapGlobalTags( + @Nullable final QueryContext context, MLModelGroup entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(context, globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLModelGroup entity, + @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); + } + + private static void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { + EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); + MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index 9a12d7917e6489..9f1918f9ec4893 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,28 +1,35 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; -public class MLModelGroupPropertiesMapper implements ModelMapper { - - public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); +public class MLModelGroupPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { - public static MLModelGroupProperties map(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(mlModelGroupProperties); - } + public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); - @Override - public 
MLModelGroupProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - final MLModelGroupProperties result = new MLModelGroupProperties(); + public static MLModelGroupProperties map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + return INSTANCE.apply(context, mlModelGroupProperties); + } - result.setDescription(mlModelGroupProperties.getDescription()); - if (mlModelGroupProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); - } - result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + @Override + public MLModelGroupProperties apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + final MLModelGroupProperties result = new MLModelGroupProperties(); - return result; + result.setDescription(mlModelGroupProperties.getDescription()); + if (mlModelGroupProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(context, mlModelGroupProperties.getVersion())); } + result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 0c2eeabe5701d9..a3bc5c663c89ae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import 
com.linkedin.common.Cost; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -12,6 +16,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; @@ -27,8 +33,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -45,127 +53,192 @@ import com.linkedin.ml.metadata.QuantitativeAnalyses; import com.linkedin.ml.metadata.SourceCode; import com.linkedin.ml.metadata.TrainingData; +import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLModelMapper implements ModelMapper { - public static final MLModelMapper INSTANCE = new MLModelMapper(); - - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final MLModelMapper INSTANCE = new MLModelMapper(); + + public static MLModel map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public MLModel apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final MLModel result = new MLModel(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, MLModelMapper::mapMLModelKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + ML_MODEL_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> mapMLModelProperties(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (mlModel, dataMap) -> mapGlobalTags(context, mlModel, dataMap, entityUrn)); + mappingHelper.mapToResult( + INTENDED_USE_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setIntendedUse(IntendedUseMapper.map(context, new IntendedUse(dataMap)))); + mappingHelper.mapToResult( + ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setFactorPrompts( + MLModelFactorPromptsMapper.map(context, new MLModelFactorPrompts(dataMap)))); + mappingHelper.mapToResult( + METRICS_ASPECT_NAME, + (mlModel, 
dataMap) -> mlModel.setMetrics(MetricsMapper.map(context, new Metrics(dataMap)))); + mappingHelper.mapToResult( + EVALUATION_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEvaluationData( + new EvaluationData(dataMap) + .getEvaluationData().stream() + .map(d -> BaseDataMapper.map(context, d)) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + TRAINING_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setTrainingData( + new TrainingData(dataMap) + .getTrainingData().stream() + .map(d -> BaseDataMapper.map(context, d)) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + QUANTITATIVE_ANALYSES_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setQuantitativeAnalyses( + QuantitativeAnalysesMapper.map(context, new QuantitativeAnalyses(dataMap)))); + mappingHelper.mapToResult( + ETHICAL_CONSIDERATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEthicalConsiderations( + EthicalConsiderationsMapper.map(context, new EthicalConsiderations(dataMap)))); + mappingHelper.mapToResult( + CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setCaveatsAndRecommendations( + CaveatsAndRecommendationsMapper.map( + context, new CaveatsAndRecommendations(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(context, SOURCE_CODE_ASPECT_NAME, MLModelMapper::mapSourceCode); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + COST_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(context, new Cost(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); + 
mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLModelMapper::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, MLModelMapper::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModel.class); + } else { + return mappingHelper.getResult(); } - - @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { - final MLModel result = new MLModel(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModel, 
dataMap) -> - mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); - mappingHelper.mapToResult(INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); - mappingHelper.mapToResult(ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setFactorPrompts(MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); - mappingHelper.mapToResult(METRICS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); - mappingHelper.mapToResult(EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEvaluationData(new EvaluationData(dataMap).getEvaluationData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(TRAINING_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setTrainingData(new TrainingData(dataMap).getTrainingData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setQuantitativeAnalyses(QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); - mappingHelper.mapToResult(ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEthicalConsiderations(EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); - mappingHelper.mapToResult(CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCaveatsAndRecommendations(CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> - 
mlModel.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(COST_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCost(CostMapper.map(new Cost(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - - return mappingHelper.getResult(); + } + + private static void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + MLModelKey mlModelKey = new MLModelKey(dataMap); + mlModel.setName(mlModelKey.getName()); + mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelKey.getPlatform().toString()); + mlModel.setPlatform(partialPlatform); + } + + private static void mapMLModelProperties( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap, Urn entityUrn) { + MLModelProperties modelProperties = new MLModelProperties(dataMap); + mlModel.setProperties(MLModelPropertiesMapper.map(context, 
modelProperties, entityUrn)); + if (modelProperties.getDescription() != null) { + mlModel.setDescription(modelProperties.getDescription()); } - - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { - MLModelKey mlModelKey = new MLModelKey(dataMap); - mlModel.setName(mlModelKey.getName()); - mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelKey.getPlatform().toString()); - mlModel.setPlatform(partialPlatform); - } - - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); - if (modelProperties.getDescription() != null) { - mlModel.setDescription(modelProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - mlModel.setGlobalTags(graphQlGlobalTags); - mlModel.setTags(graphQlGlobalTags); - } - - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { - SourceCode sourceCode = new SourceCode(dataMap); - com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = - new com.linkedin.datahub.graphql.generated.SourceCode(); - graphQlSourceCode.setSourceCode(sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map).collect(Collectors.toList())); - mlModel.setSourceCode(graphQlSourceCode); - } - - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModel entity, DataMap dataMap) { - EditableMLModelProperties input = new EditableMLModelProperties(dataMap); - MLModelEditableProperties editableProperties = new MLModelEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private static void mapGlobalTags( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(context, globalTags, entityUrn); + mlModel.setGlobalTags(graphQlGlobalTags); + mlModel.setTags(graphQlGlobalTags); + } + + private static void mapSourceCode( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap) { + SourceCode sourceCode = new SourceCode(dataMap); + com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = + new com.linkedin.datahub.graphql.generated.SourceCode(); + graphQlSourceCode.setSourceCode( + sourceCode.getSourceCode().stream() + .map(c -> SourceCodeUrlMapper.map(context, c)) + .collect(Collectors.toList())); + mlModel.setSourceCode(graphQlSourceCode); + } + + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull MLModel entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); + } + + private static void mapEditableProperties(MLModel entity, DataMap dataMap) { + EditableMLModelProperties input = new EditableMLModelProperties(dataMap); + MLModelEditableProperties editableProperties = new MLModelEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index 554c14e9a4a56d..a89904b3ab915c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,65 +1,81 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroup; +import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; - -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.MLModelProperties; - +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelPropertiesMapper { - public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); - - public static MLModelProperties map(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return INSTANCE.apply(mlModelProperties, entityUrn); - } + public 
static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); - public MLModelProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - final MLModelProperties result = new MLModelProperties(); + public static MLModelProperties map( + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + Urn entityUrn) { + return INSTANCE.apply(context, mlModelProperties, entityUrn); + } - result.setDate(mlModelProperties.getDate()); - result.setDescription(mlModelProperties.getDescription()); - if (mlModelProperties.getExternalUrl() != null) { - result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); - } - if (mlModelProperties.getVersion() != null) { - result.setVersion(mlModelProperties.getVersion().getVersionTag()); - } - result.setType(mlModelProperties.getType()); - if (mlModelProperties.getHyperParams() != null) { - result.setHyperParams(mlModelProperties.getHyperParams().stream().map( - param -> MLHyperParamMapper.map(param)).collect(Collectors.toList())); - } + public MLModelProperties apply( + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + Urn entityUrn) { + final MLModelProperties result = new MLModelProperties(); - result.setCustomProperties(CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); + result.setDate(mlModelProperties.getDate()); + result.setDescription(mlModelProperties.getDescription()); + if (mlModelProperties.getExternalUrl() != null) { + result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); + } + if (mlModelProperties.getVersion() != null) { + result.setVersion(mlModelProperties.getVersion().getVersionTag()); + } + result.setType(mlModelProperties.getType()); + if (mlModelProperties.getHyperParams() != null) { + result.setHyperParams( + mlModelProperties.getHyperParams().stream() + .map(param 
-> MLHyperParamMapper.map(context, param)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getTrainingMetrics() != null) { - result.setTrainingMetrics(mlModelProperties.getTrainingMetrics().stream().map(metric -> - MLMetricMapper.map(metric) - ).collect(Collectors.toList())); - } + result.setCustomProperties( + CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); - if (mlModelProperties.getGroups() != null) { - result.setGroups(mlModelProperties.getGroups().stream().map(group -> { - final MLModelGroup subgroup = new MLModelGroup(); - subgroup.setUrn(group.toString()); - return subgroup; - }).collect(Collectors.toList())); - } + if (mlModelProperties.getTrainingMetrics() != null) { + result.setTrainingMetrics( + mlModelProperties.getTrainingMetrics().stream() + .map(metric -> MLMetricMapper.map(context, metric)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getMlFeatures() != null) { - result.setMlFeatures(mlModelProperties - .getMlFeatures() - .stream() - .map(Urn::toString) - .collect(Collectors.toList())); - } - result.setTags(mlModelProperties.getTags()); + if (mlModelProperties.getGroups() != null) { + result.setGroups( + mlModelProperties.getGroups().stream() + .filter(g -> context == null || canView(context.getOperationContext(), g)) + .map( + group -> { + final MLModelGroup subgroup = new MLModelGroup(); + subgroup.setUrn(group.toString()); + return subgroup; + }) + .collect(Collectors.toList())); + } - return result; + if (mlModelProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlModelProperties.getMlFeatures().stream() + .filter(f -> context == null || canView(context.getOperationContext(), f)) + .map(Urn::toString) + .collect(Collectors.toList())); } + result.setTags(mlModelProperties.getTags()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index 0bd5db4d884ae4..36784f96ea30ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; +import com.linkedin.common.Forms; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -10,6 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; @@ -22,8 +28,10 @@ import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; +import com.linkedin.datahub.graphql.types.form.FormsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ 
-31,90 +39,133 @@ import com.linkedin.metadata.key.MLPrimaryKeyKey; import com.linkedin.ml.metadata.EditableMLPrimaryKeyProperties; import com.linkedin.ml.metadata.MLPrimaryKeyProperties; +import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLPrimaryKeyMapper implements ModelMapper { - public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); + public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLPrimaryKey map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { - final MLPrimaryKey result = new MLPrimaryKey(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLPrimaryKey apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final MLPrimaryKey result = new MLPrimaryKey(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLPRIMARY_KEY); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLPRIMARY_KEY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + 
result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); - mappingHelper.mapToResult(ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_KEY_ASPECT_NAME, MLPrimaryKeyMapper::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + context, + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, + MLPrimaryKeyMapper::mapMLPrimaryKeyProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setDeprecation(DeprecationMapper.map(context, new 
Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - return mappingHelper.getResult(); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLPrimaryKeyMapper::mapDomains); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, MLPrimaryKeyMapper::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((entity, dataMap) -> + entity.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + FORMS_ASPECT_NAME, + ((entity, dataMap) -> + entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLPrimaryKey.class); + } 
else { + return mappingHelper.getResult(); } + } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); - mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); - mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); - } + private static void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); + mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); + mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); + } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); - if (primaryKeyProperties.getDataType() != null) { - mlPrimaryKey.setDataType(MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); - } + private static void mapMLPrimaryKeyProperties( + @Nullable final QueryContext context, MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); + mlPrimaryKey.setPrimaryKeyProperties( + MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); + mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); + if (primaryKeyProperties.getDataType() != null) { + mlPrimaryKey.setDataType( + MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); } + } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - 
com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } + private static void mapGlobalTags( + @Nullable final QueryContext context, MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(context, globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLPrimaryKey entity, + @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); + } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { - EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); - MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + private static void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); + MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 39ecd96af182f3..09e41fe7ee4e8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -1,42 +1,50 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import javax.annotation.Nullable; +import lombok.NonNull; -public class 
MLPrimaryKeyPropertiesMapper implements ModelMapper { +public class MLPrimaryKeyPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { - public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); + public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); - public static MLPrimaryKeyProperties map(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); - } + public static MLPrimaryKeyProperties map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + return INSTANCE.apply(context, mlPrimaryKeyProperties); + } - @Override - public MLPrimaryKeyProperties apply(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); + @Override + public MLPrimaryKeyProperties apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); - result.setDescription(mlPrimaryKeyProperties.getDescription()); - if (mlPrimaryKeyProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); - } - if (mlPrimaryKeyProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); - } - result.setSources(mlPrimaryKeyProperties - .getSources() - .stream() - .map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }) + result.setDescription(mlPrimaryKeyProperties.getDescription()); + if (mlPrimaryKeyProperties.getDataType() != null) { + result.setDataType( + 
MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); + } + if (mlPrimaryKeyProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(context, mlPrimaryKeyProperties.getVersion())); + } + result.setSources( + mlPrimaryKeyProperties.getSources().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) .collect(Collectors.toList())); - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 05b34ba3acb9c3..ce6357655dfbf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -1,23 +1,26 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; public class MetricsMapper implements ModelMapper { - public static final MetricsMapper INSTANCE = new MetricsMapper(); + public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); - } + public static Metrics map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(context, metrics); + } - @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - final Metrics result = new Metrics(); - result.setDecisionThreshold(metrics.getDecisionThreshold()); - 
result.setPerformanceMeasures(metrics.getPerformanceMeasures()); - return result; - } + @Override + public Metrics apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.Metrics metrics) { + final Metrics result = new Metrics(); + result.setDecisionThreshold(metrics.getDecisionThreshold()); + result.setPerformanceMeasures(metrics.getPerformanceMeasures()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index 8bd25a44745790..fbb259666c2731 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -1,23 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; -public class QuantitativeAnalysesMapper implements ModelMapper { +public class QuantitativeAnalysesMapper + implements ModelMapper { - public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); + public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); - public static QuantitativeAnalyses map(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); - } + public static QuantitativeAnalyses map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + return INSTANCE.apply(context, quantitativeAnalyses); + } - 
@Override - public QuantitativeAnalyses apply(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - final QuantitativeAnalyses result = new QuantitativeAnalyses(); - result.setIntersectionalResults(ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); - return result; - } + @Override + public QuantitativeAnalyses apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + final QuantitativeAnalyses result = new QuantitativeAnalyses(); + result.setIntersectionalResults( + ResultsTypeMapper.map(context, quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults( + ResultsTypeMapper.map(context, quantitativeAnalyses.getUnitaryResults())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 78292f08f8cade..e73f80511fbaa5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -1,27 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nullable; import lombok.NonNull; -public class ResultsTypeMapper implements ModelMapper { +public class ResultsTypeMapper + implements ModelMapper { - public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); + public static final 
ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); - } + public static ResultsType map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(context, input); + } - @Override - public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - final ResultsType result; - if (input.isString()) { - result = new StringBox(input.getString()); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); - } - return result; + @Override + public ResultsType apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.ResultsType input) { + final ResultsType result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index 79dbd2cded4c25..1b0695e5993494 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import javax.annotation.Nonnull; - +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class 
SourceCodeUrlMapper implements ModelMapper { - public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); +public class SourceCodeUrlMapper + implements ModelMapper { + public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); - } + public static SourceCodeUrl map( + @Nullable QueryContext context, @Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(context, input); + } - @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - final SourceCodeUrl results = new SourceCodeUrl(); - results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); - results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); - return results; - } + @Override + public SourceCodeUrl apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + final SourceCodeUrl results = new SourceCodeUrl(); + results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); + results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 6ad0945b0621fd..4020ef6e35ece9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -1,21 +1,26 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; import com.linkedin.common.VersionTag; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import 
javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class VersionTagMapper implements ModelMapper { - public static final VersionTagMapper INSTANCE = new VersionTagMapper(); +public class VersionTagMapper + implements ModelMapper { + public static final VersionTagMapper INSTANCE = new VersionTagMapper(); - public static com.linkedin.datahub.graphql.generated.VersionTag map(@Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); - } + public static com.linkedin.datahub.graphql.generated.VersionTag map( + @Nullable QueryContext context, @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(context, versionTag); + } - @Override - public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { - final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); - result.setVersionTag(input.getVersionTag()); - return result; - } + @Override + public com.linkedin.datahub.graphql.generated.VersionTag apply( + @Nullable QueryContext context, @Nonnull final VersionTag input) { + final com.linkedin.datahub.graphql.generated.VersionTag result = + new com.linkedin.datahub.graphql.generated.VersionTag(); + result.setVersionTag(input.getVersionTag()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index 080cdeba09f19e..8eeda9331ad8ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.notebook; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import 
com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -18,25 +21,24 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; +import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.BrowsableEntityType; import com.linkedin.datahub.graphql.types.MutableType; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookUpdateInputMapper; -import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; 
import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -53,25 +55,25 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class NotebookType implements SearchableEntityType, BrowsableEntityType, - MutableType { - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - NOTEBOOK_KEY_ASPECT_NAME, - NOTEBOOK_INFO_ASPECT_NAME, - NOTEBOOK_CONTENT_ASPECT_NAME, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); +public class NotebookType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + NOTEBOOK_KEY_ASPECT_NAME, + NOTEBOOK_INFO_ASPECT_NAME, + NOTEBOOK_CONTENT_ASPECT_NAME, + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); private final EntityClient _entityClient; @@ -80,45 +82,73 @@ public NotebookType(EntityClient entityClient) { } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final 
QueryContext context) throws Exception { + @Nonnull final QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map facetFilters = Collections.emptyMap(); - final SearchResult searchResult = _entityClient.search(NOTEBOOK_ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + NOTEBOOK_ENTITY_NAME, + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), NOTEBOOK_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); } @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map facetFilters = Collections.emptyMap(); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse(NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); - return BrowseResultMapper.map(result); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), + NOTEBOOK_ENTITY_NAME, + pathStr, + facetFilters, + start, + count); + return BrowseResultMapper.map(context, result); } @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(NotebookUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + context.getOperationContext(), NotebookUrn.createFromString(urn)); + return BrowsePathsMapper.map(context, result); } @Override @@ -137,22 +167,26 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map notebookMap = _entityClient.batchGetV2(NOTEBOOK_ENTITY_NAME, new HashSet<>(urns), - ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map notebookMap = + _entityClient.batchGetV2( + context.getOperationContext(), + NOTEBOOK_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE); return urns.stream() .map(urn -> 
notebookMap.getOrDefault(urn, null)) - .map(entityResponse -> entityResponse == null - ? null - : DataFetcherResult.newResult() - .data(NotebookMapper.map(entityResponse)) - .build()) + .map( + entityResponse -> + entityResponse == null + ? null + : DataFetcherResult.newResult() + .data(NotebookMapper.map(context, entityResponse)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Notebook", e); @@ -165,18 +199,21 @@ public Class inputClass() { } @Override - public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) + public Notebook update( + @Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) throws Exception { if (!isAuthorized(urn, input, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - Collection proposals = NotebookUpdateInputMapper.map(input, actor); + CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + Collection proposals = + NotebookUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } @@ -184,12 +221,13 @@ public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, return load(urn, context).getData(); } - private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { // Decide whether the current principal should be allowed to update the Dataset. 
final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.NOTEBOOK_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); @@ -197,9 +235,9 @@ private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput u private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); List specificPrivileges = new ArrayList<>(); if (updateInput.getOwnership() != null) { @@ -211,12 +249,12 @@ private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateIn if (updateInput.getTags() != null) { specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index 2b937c86c9779e..109006f9d4a90a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.GlobalTags; @@ -10,6 +13,8 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.ChartCell; @@ -26,11 +31,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; import com.linkedin.datahub.graphql.types.common.mappers.ChangeAuditStampsMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -44,18 +49,17 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; +import javax.annotation.Nullable; public class NotebookMapper implements ModelMapper { public static final NotebookMapper INSTANCE = new NotebookMapper(); - public static Notebook map(EntityResponse response) { - return INSTANCE.apply(response); + public static Notebook map(@Nullable final QueryContext context, EntityResponse response) { + return INSTANCE.apply(context, response); } @Override - public Notebook apply(EntityResponse response) { + public Notebook apply(@Nullable final QueryContext context, EntityResponse response) { final Notebook convertedNotebook = new Notebook(); Urn entityUrn = response.getUrn(); @@ -63,58 +67,91 @@ public Notebook apply(EntityResponse response) { convertedNotebook.setType(EntityType.NOTEBOOK); EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); - mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); - mappingHelper.mapToResult(NOTEBOOK_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); - mappingHelper.mapToResult(EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> notebook.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn) - )); - 
mappingHelper.mapToResult(STATUS_ASPECT_NAME, (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> - notebook.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, NotebookMapper::mapNotebookKey); + mappingHelper.mapToResult( + NOTEBOOK_INFO_ASPECT_NAME, + (entity, dataMap) -> mapNotebookInfo(context, entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + context, NOTEBOOK_CONTENT_ASPECT_NAME, NotebookMapper::mapNotebookContent); + mappingHelper.mapToResult( + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, NotebookMapper::mapEditableNotebookProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(context, new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setTags(GlobalTagsMapper.map(context, new 
GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, NotebookMapper::mapDomains); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, NotebookMapper::mapSubTypes); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, DATA_PLATFORM_INSTANCE_ASPECT_NAME, NotebookMapper::mapDataPlatformInstance); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Notebook.class); + } else { + return mappingHelper.getResult(); + } } - private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { + private static void mapDataPlatformInstance( + @Nullable final QueryContext context, Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); - notebook.setPlatform(DataPlatform - .builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(dataPlatformInstance.getPlatform().toString()) - .build()); - notebook.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); + notebook.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(dataPlatformInstance.getPlatform().toString()) + .build()); + notebook.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap))); } - private void mapSubTypes(Notebook notebook, 
DataMap dataMap) { + private static void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); + com.linkedin.datahub.graphql.generated.SubTypes subTypes = + new com.linkedin.datahub.graphql.generated.SubTypes(); subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); notebook.setSubTypes(subTypes); } } - private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final NotebookKey notebookKey = new NotebookKey(dataMap); notebook.setNotebookId(notebookKey.getNotebookId()); notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); + private static void mapNotebookInfo( + @Nullable final QueryContext context, + @Nonnull Notebook notebook, + @Nonnull DataMap dataMap, + Urn entityUrn) { + final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = + new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); notebookInfo.setTitle(gmsNotebookInfo.getTitle()); - notebookInfo.setChangeAuditStamps(ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + notebookInfo.setChangeAuditStamps( + ChangeAuditStampsMapper.map(context, gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -122,82 +159,98 @@ private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMa } if (gmsNotebookInfo.hasCustomProperties()) { - 
notebookInfo.setCustomProperties(CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); + notebookInfo.setCustomProperties( + CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); } notebook.setInfo(notebookInfo); } - private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - com.linkedin.notebook.NotebookContent pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); + private static void mapNotebookContent( + @Nullable final QueryContext context, @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + com.linkedin.notebook.NotebookContent pegasusNotebookContent = + new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); - notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); + notebookContent.setCells(mapNotebookCells(context, pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List mapNotebookCells(com.linkedin.notebook.NotebookCellArray pegasusCells) { + private static List mapNotebookCells( + @Nullable final QueryContext context, com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() - .map(pegasusCell -> { - NotebookCell notebookCell = new NotebookCell(); - NotebookCellType cellType = NotebookCellType.valueOf(pegasusCell.getType().toString()); - notebookCell.setType(cellType); - switch (cellType) { - case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); - break; - case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); - break; - case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); - break; - default: - throw new DataHubGraphQLException(String.format("Un-supported NotebookCellType: %s", cellType), - DataHubGraphQLErrorCode.SERVER_ERROR); - } - return notebookCell; - }) + .map( + pegasusCell -> { + NotebookCell 
notebookCell = new NotebookCell(); + NotebookCellType cellType = + NotebookCellType.valueOf(pegasusCell.getType().toString()); + notebookCell.setType(cellType); + switch (cellType) { + case CHART_CELL: + notebookCell.setChartCell(mapChartCell(context, pegasusCell.getChartCell())); + break; + case TEXT_CELL: + notebookCell.setTextCell(mapTextCell(context, pegasusCell.getTextCell())); + break; + case QUERY_CELL: + notebookCell.setQueryChell(mapQueryCell(context, pegasusCell.getQueryCell())); + break; + default: + throw new DataHubGraphQLException( + String.format("Un-supported NotebookCellType: %s", cellType), + DataHubGraphQLErrorCode.SERVER_ERROR); + } + return notebookCell; + }) .collect(Collectors.toList()); } - private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) { + private static ChartCell mapChartCell( + @Nullable final QueryContext context, com.linkedin.notebook.ChartCell pegasusChartCell) { ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); - chartCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + chartCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(context, pegasusChartCell.getChangeAuditStamps())); return chartCell; } - private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { + private static TextCell mapTextCell( + @Nullable final QueryContext context, com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); - textCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + textCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(context, pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } - private QueryCell 
mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) { + private static QueryCell mapQueryCell( + @Nullable final QueryContext context, com.linkedin.notebook.QueryCell pegasusQueryCell) { QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); - queryCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + queryCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(context, pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { - queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); + queryCell.setLastExecuted(AuditStampMapper.map(context, pegasusQueryCell.getLastExecuted())); } return queryCell; } - private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); + private static void mapEditableNotebookProperties( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + final EditableNotebookProperties editableNotebookProperties = + new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); notebookEditableProperties.setDescription(editableNotebookProperties.getDescription()); notebook.setEditableProperties(notebookEditableProperties); } - private void mapDomains(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- notebook.setDomain(DomainAssociationMapper.map(domains, notebook.getUrn())); + notebook.setDomain(DomainAssociationMapper.map(context, domains, notebook.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0c3787d630500a..62d1e488482ac3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -16,22 +19,23 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class NotebookUpdateInputMapper implements InputModelMapper, - Urn> { +public class NotebookUpdateInputMapper + implements InputModelMapper, Urn> { public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); - public static Collection map(@Nonnull final NotebookUpdateInput notebookUpdateInput, + public static Collection map( + @Nullable final QueryContext 
context, + @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(notebookUpdateInput, actor); + return INSTANCE.apply(context, notebookUpdateInput, actor); } @Override - public Collection apply(NotebookUpdateInput input, Urn actor) { + public Collection apply( + @Nullable final QueryContext context, NotebookUpdateInput input, Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(NOTEBOOK_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -39,27 +43,33 @@ public Collection apply(NotebookUpdateInput input, Urn a auditStamp.setTime(System.currentTimeMillis()); if (input.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(input.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(context, input.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + input.getTags().getTags().stream() + .map(t -> TagAssociationUpdateMapper.map(context, t)) + .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (input.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); editableDashboardProperties.setDescription(input.getEditableProperties().getDescription()); if (!editableDashboardProperties.hasCreated()) { editableDashboardProperties.setCreated(auditStamp); } 
editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index 79f95ac8439a55..b4b01acf54451c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; @@ -20,14 +22,12 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class OwnershipType implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); private final EntityClient 
_entityClient; @Override @@ -46,26 +46,34 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, - @Nonnull QueryContext context) throws Exception { - final List ownershipTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List ownershipTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(OWNERSHIP_TYPE_ENTITY_NAME, new HashSet<>(ownershipTypeUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + OWNERSHIP_TYPE_ENTITY_NAME, + new HashSet<>(ownershipTypeUrns), + ASPECTS_TO_FETCH); final List gmsResults = new ArrayList<>(); for (Urn urn : ownershipTypeUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null : DataFetcherResult.newResult() - .data(OwnershipTypeMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(OwnershipTypeMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Custom Ownership Types", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 37b59b679e3ac3..76d41897dafd66 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -1,32 +1,34 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class OwnershipTypeMapper implements ModelMapper { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); - public static OwnershipTypeEntity map(@Nonnull final EntityResponse 
entityResponse) { - return INSTANCE.apply(entityResponse); + public static OwnershipTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { + public OwnershipTypeEntity apply(@Nullable QueryContext context, @Nonnull EntityResponse input) { final OwnershipTypeEntity result = new OwnershipTypeEntity(); result.setUrn(input.getUrn().toString()); @@ -34,12 +36,14 @@ public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { EnvelopedAspectMap aspectMap = input.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); return mappingHelper.getResult(); } - private void mapOwnershipTypeInfo(@Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { + private void mapOwnershipTypeInfo( + @Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { final com.linkedin.ownership.OwnershipTypeInfo gmsOwnershipTypeInfo = new com.linkedin.ownership.OwnershipTypeInfo(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 167e1615fc4cc5..e40ae84f2c131c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -1,8 +1,11 @@ package 
com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.DataHubPolicy; import com.linkedin.datahub.graphql.generated.EntityType; @@ -24,31 +27,32 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class DataHubPolicyMapper implements ModelMapper { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); - public static DataHubPolicy map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubPolicy map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubPolicy apply(@Nonnull final EntityResponse entityResponse) { + public DataHubPolicy apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataHubPolicy result = new DataHubPolicy(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATAHUB_POLICY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATAHUB_POLICY_INFO_ASPECT_NAME, this::mapDataHubPolicyInfo); + mappingHelper.mapToResult(context, DATAHUB_POLICY_INFO_ASPECT_NAME, this::mapDataHubPolicyInfo); return mappingHelper.getResult(); } - private void mapDataHubPolicyInfo(@Nonnull DataHubPolicy policy, @Nonnull DataMap dataMap) { + private void mapDataHubPolicyInfo( + @Nullable QueryContext context, @Nonnull DataHubPolicy policy, @Nonnull DataMap dataMap) { 
DataHubPolicyInfo policyInfo = new DataHubPolicyInfo(dataMap); policy.setDescription(policyInfo.getDescription()); // Careful - we assume no other Policy types or states have been ingested using a backdoor. @@ -59,7 +63,7 @@ private void mapDataHubPolicyInfo(@Nonnull DataHubPolicy policy, @Nonnull DataMa policy.setActors(mapActors(policyInfo.getActors())); policy.setEditable(policyInfo.isEditable()); if (policyInfo.hasResources()) { - policy.setResources(mapResources(policyInfo.getResources())); + policy.setResources(mapResources(context, policyInfo.getResources())); } } @@ -71,21 +75,26 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { // Change here is not executed at the moment - leaving it for the future UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } - private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) { + private ResourceFilter mapResources( + @Nullable QueryContext context, final DataHubResourceFilter resourceFilter) 
{ final ResourceFilter result = new ResourceFilter(); result.setAllResources(resourceFilter.isAllResources()); if (resourceFilter.hasType()) { @@ -95,29 +104,39 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) result.setResources(resourceFilter.getResources()); } if (resourceFilter.hasFilter()) { - result.setFilter(mapFilter(resourceFilter.getFilter())); + result.setFilter(mapFilter(context, resourceFilter.getFilter())); } return result; } - private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { + private PolicyMatchFilter mapFilter( + @Nullable QueryContext context, final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(c -> mapValue(context, c)) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } - private PolicyMatchCriterionValue mapValue(final String value) { + private PolicyMatchCriterionValue mapValue(@Nullable QueryContext context, final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(context, urn)) + .build(); } catch (URISyntaxException e) { // Value 
is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 4cec59009af3fe..4e448af2ec93b6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubPolicyType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubPolicyType + implements com.linkedin.datahub.graphql.types.EntityType { static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_POLICY_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,22 +44,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + POLICY_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH); final List gmsResults = new ArrayList<>(); for (Urn urn : roleUrns) { 
gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubPolicyMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubPolicyMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index 791197c7d47e49..674011a4f2f288 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.types.post; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Media; @@ -15,20 +18,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class PostMapper implements ModelMapper { public static final PostMapper INSTANCE = new PostMapper(); - public static Post map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Post map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Post apply(@Nonnull final EntityResponse entityResponse) { + public Post apply( + 
@Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Post result = new Post(); result.setUrn(entityResponse.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index cf77821b1a2808..e71b569e9ae238 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -1,9 +1,14 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.QueryEntity; @@ -11,6 +16,7 @@ import com.linkedin.datahub.graphql.generated.QuerySource; import com.linkedin.datahub.graphql.generated.QueryStatement; import com.linkedin.datahub.graphql.generated.QuerySubject; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -20,42 +26,61 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - +@Slf4j public class QueryMapper implements ModelMapper { public 
static final QueryMapper INSTANCE = new QueryMapper(); - public static QueryEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static QueryEntity map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { + public QueryEntity apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final QueryEntity result = new QueryEntity(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.QUERY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); + mappingHelper.mapToResult(context, QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); mappingHelper.mapToResult(QUERY_SUBJECTS_ASPECT_NAME, this::mapQuerySubjects); + mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapPlatform); return mappingHelper.getResult(); } - private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { + private void mapPlatform(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { + DataPlatformInstance aspect = new DataPlatformInstance(dataMap); + if (aspect.hasPlatform()) { + final DataPlatform platform = new DataPlatform(); + platform.setUrn(aspect.getPlatform().toString()); + platform.setType(EntityType.DATA_PLATFORM); + query.setPlatform(platform); + } + } + + private void mapQueryProperties( + @Nullable final QueryContext context, @Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); - com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); + 
com.linkedin.datahub.graphql.generated.QueryProperties res = + new com.linkedin.datahub.graphql.generated.QueryProperties(); // Query Source must be kept in sync. res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())); - res.setStatement(new QueryStatement( - queryProperties.getStatement().getValue(), - QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); + res.setStatement( + new QueryStatement( + queryProperties.getStatement().getValue(), + QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); + if (queryProperties.hasOrigin() && queryProperties.getOrigin() != null) { + res.setOrigin(UrnToEntityMapper.map(context, queryProperties.getOrigin())); + } AuditStamp created = new AuditStamp(); created.setTime(queryProperties.getCreated().getTime()); @@ -73,10 +98,10 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat @Nonnull private void mapQuerySubjects(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QuerySubjects querySubjects = new QuerySubjects(dataMap); - List res = querySubjects.getSubjects() - .stream() - .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) - .collect(Collectors.toList()); + List res = + querySubjects.getSubjects().stream() + .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) + .collect(Collectors.toList()); query.setSubjects(res); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index c138cd56f20b3f..2529ca4857869b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -1,5 +1,7 @@ 
package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,15 +21,14 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - +@Slf4j @RequiredArgsConstructor -public class QueryType implements com.linkedin.datahub.graphql.types.EntityType { - public static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - QUERY_PROPERTIES_ASPECT_NAME, - QUERY_SUBJECTS_ASPECT_NAME); +public class QueryType + implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,25 +47,34 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + log.debug("Fetching query entities: {}", viewUrns); final Map entities = - _entityClient.batchGetV2(QUERY_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + QUERY_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : viewUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(QueryMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(QueryMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index e1762022f4bcbe..b20e78e149c3e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -1,27 +1,33 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class DataFlowDataJobsRelationshipsMapper implements - ModelMapper { +public class DataFlowDataJobsRelationshipsMapper + implements ModelMapper { - public static final DataFlowDataJobsRelationshipsMapper INSTANCE = new DataFlowDataJobsRelationshipsMapper(); + public static final DataFlowDataJobsRelationshipsMapper INSTANCE = + new DataFlowDataJobsRelationshipsMapper(); - public static DataFlowDataJobsRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DataFlowDataJobsRelationships map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return 
INSTANCE.apply(context, relationships); + } - @Override - public DataFlowDataJobsRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DataFlowDataJobsRelationships apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 824e1181c58710..6a03a060c3687a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -1,27 +1,33 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class DownstreamEntityRelationshipsMapper implements - ModelMapper { +public class DownstreamEntityRelationshipsMapper + implements ModelMapper { - 
public static final DownstreamEntityRelationshipsMapper INSTANCE = new DownstreamEntityRelationshipsMapper(); + public static final DownstreamEntityRelationshipsMapper INSTANCE = + new DownstreamEntityRelationshipsMapper(); - public static DownstreamEntityRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DownstreamEntityRelationships map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(context, relationships); + } - @Override - public DownstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DownstreamEntityRelationships apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index 58f4f477bc7e6b..7ab37031d824c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -1,32 +1,40 @@ package 
com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.EntityWithRelationships; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class EntityRelationshipLegacyMapper implements ModelMapper { +public class EntityRelationshipLegacyMapper + implements ModelMapper { - public static final EntityRelationshipLegacyMapper INSTANCE = new EntityRelationshipLegacyMapper(); + public static final EntityRelationshipLegacyMapper INSTANCE = + new EntityRelationshipLegacyMapper(); - public static EntityRelationshipLegacy map(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); - } + public static EntityRelationshipLegacy map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + return INSTANCE.apply(context, relationship); + } - @Override - public EntityRelationshipLegacy apply(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); + @Override + public EntityRelationshipLegacy apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); - EntityWithRelationships partialLineageEntity = (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); - if (partialLineageEntity != null) { - result.setEntity(partialLineageEntity); - } - if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); - } - 
return result; + EntityWithRelationships partialLineageEntity = + (EntityWithRelationships) UrnToEntityMapper.map(context, relationship.getEntity()); + if (partialLineageEntity != null) { + result.setEntity(partialLineageEntity); + } + if (relationship.hasCreated()) { + result.setCreated(AuditStampMapper.map(context, relationship.getCreated())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 7db5e08c73fc60..35abc849e8f978 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -1,25 +1,33 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class UpstreamEntityRelationshipsMapper implements ModelMapper { +public class UpstreamEntityRelationshipsMapper + implements ModelMapper { - public static final UpstreamEntityRelationshipsMapper INSTANCE = new UpstreamEntityRelationshipsMapper(); + public static final UpstreamEntityRelationshipsMapper INSTANCE = + new UpstreamEntityRelationshipsMapper(); - public static UpstreamEntityRelationships map(@Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static UpstreamEntityRelationships map( + @Nullable QueryContext context, + 
@Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(context, relationships); + } - @Override - public UpstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public UpstreamEntityRelationships apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.EntityRelationships input) { + final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java new file mode 100644 index 00000000000000..cf40cc51d1e231 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java @@ -0,0 +1,32 @@ +package com.linkedin.datahub.graphql.types.restricted; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.entity.EntityResponse; +import io.datahubproject.metadata.services.RestrictedService; +import javax.annotation.Nonnull; + +public class RestrictedMapper { + + public static final RestrictedMapper INSTANCE = new RestrictedMapper(); + + public static Restricted map( + @Nonnull final EntityResponse entityResponse, + @Nonnull final RestrictedService restrictedService) { + return INSTANCE.apply(entityResponse, restrictedService); + } + + public Restricted 
apply( + @Nonnull final EntityResponse entityResponse, + @Nonnull final RestrictedService restrictedService) { + final Restricted result = new Restricted(); + Urn entityUrn = entityResponse.getUrn(); + String restrictedUrnString = restrictedService.encryptRestrictedUrn(entityUrn).toString(); + + result.setUrn(restrictedUrnString); + result.setType(EntityType.RESTRICTED); + + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java new file mode 100644 index 00000000000000..32b60de070dd43 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java @@ -0,0 +1,103 @@ +package com.linkedin.datahub.graphql.types.restricted; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.Restricted; +import com.linkedin.datahub.graphql.types.EntityType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import io.datahubproject.metadata.services.RestrictedService; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class RestrictedType implements EntityType { + public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(); + + private final EntityClient _entityClient; + private final RestrictedService _restrictedService; + + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.RESTRICTED; + } + + 
@Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return Restricted.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List restrictedUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List entityUrns = + restrictedUrns.stream() + .map(_restrictedService::decryptRestrictedUrn) + .collect(Collectors.toList()); + + // Create a map for entityType: entityUrns so we can fetch by entity type below + final Map> entityTypeToUrns = createEntityTypeToUrnsMap(entityUrns); + + try { + // Fetch from the DB for each entity type and add to one result map + final Map entities = new HashMap<>(); + entityTypeToUrns + .keySet() + .forEach( + entityType -> { + try { + entities.putAll( + _entityClient.batchGetV2( + context.getOperationContext(), + entityType, + new HashSet<>(entityTypeToUrns.get(entityType)), + ASPECTS_TO_FETCH)); + } catch (Exception e) { + throw new RuntimeException("Failed to fetch restricted entities", e); + } + }); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : entityUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(RestrictedMapper.map(gmsResult, _restrictedService)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } + + private Map> createEntityTypeToUrnsMap(final List urns) { + final Map> entityTypeToUrns = new HashMap<>(); + urns.forEach( + urn -> { + String entityType = urn.getEntityType(); + List existingUrns = + entityTypeToUrns.computeIfAbsent(entityType, k -> new ArrayList<>()); + existingUrns.add(urn); + }); + return entityTypeToUrns; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 8c6496390943bf..b43c7dc2695c2f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -21,11 +23,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubRoleType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubRoleType + implements com.linkedin.datahub.graphql.types.EntityType { static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_ROLE_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -45,22 +45,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext 
context) throws Exception { final List roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH); final List gmsResults = new ArrayList<>(); for (Urn urn : roleUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubRoleMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubRoleMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 5ba31a1602780e..7ba42b08cdc6af 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.types.role.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -9,20 +12,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; - -import 
static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; public class DataHubRoleMapper implements ModelMapper { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); - public static DataHubRole map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubRole map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubRole apply(@Nonnull final EntityResponse entityResponse) { + public DataHubRole apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataHubRole result = new DataHubRole(); result.setUrn(entityResponse.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index 084c4d5033ad0a..6d6a4545dc01d2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -11,20 +11,16 @@ import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.SearchableEntityType; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.SearchFlags; 
import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -33,88 +29,102 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class RoleType implements SearchableEntityType, +public class RoleType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ROLE_KEY, - Constants.ROLE_PROPERTIES_ASPECT_NAME, - Constants.ROLE_ACTORS_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ROLE_KEY, + Constants.ROLE_PROPERTIES_ASPECT_NAME, + Constants.ROLE_ACTORS_ASPECT_NAME); - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public RoleType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public RoleType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.ROLE; - } + @Override + public EntityType type() { + return EntityType.ROLE; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return Role.class; - } + @Override + public Class objectClass() { + return Role.class; + } - @Override - public List> batchLoad(@Nonnull List urns, - @Nonnull QueryContext context) throws Exception { - final List externalRolesUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List 
externalRolesUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map entities = _entityClient.batchGetV2( - Constants.ROLE_ENTITY_NAME, - new HashSet<>(externalRolesUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.ROLE_ENTITY_NAME, + new HashSet<>(externalRolesUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); - for (Urn urn : externalRolesUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(RoleMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Role", e); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : externalRolesUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(RoleMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Role", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search(Constants.ROLE_ENTITY_NAME, - query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.ROLE_ENTITY_NAME, - query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + Constants.ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + context.getOperationContext(), Constants.ROLE_ENTITY_NAME, query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java index cabace1a524413..2d6bd31c84fd90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java @@ -1,41 +1,42 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; - +import com.linkedin.common.RoleAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.RoleAssociation; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class AccessMapper { - public static final AccessMapper INSTANCE = new AccessMapper(); - - public static com.linkedin.datahub.graphql.generated.Access map( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - return INSTANCE.apply(access, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.Access apply( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.Access result = new com.linkedin.datahub.graphql.generated.Access(); - result.setRoles(access.getRoles().stream().map( - association -> this.mapRoleAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } - - private RoleAssociation mapRoleAssociation(com.linkedin.common.RoleAssociation association, Urn entityUrn) { - RoleAssociation roleAssociation = new RoleAssociation(); - Role role = new Role(); - role.setType(EntityType.ROLE); - role.setUrn(association.getUrn().toString()); - roleAssociation.setRole(role); - 
roleAssociation.setAssociatedUrn(entityUrn.toString()); - return roleAssociation; - } - + public static final AccessMapper INSTANCE = new AccessMapper(); + + public static com.linkedin.datahub.graphql.generated.Access map( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(access, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Access apply( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.Access result = + new com.linkedin.datahub.graphql.generated.Access(); + RoleAssociationArray roles = + access.getRoles() != null ? access.getRoles() : new RoleAssociationArray(); + result.setRoles( + roles.stream() + .map(association -> this.mapRoleAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private RoleAssociation mapRoleAssociation( + com.linkedin.common.RoleAssociation association, Urn entityUrn) { + RoleAssociation roleAssociation = new RoleAssociation(); + Role role = new Role(); + role.setType(EntityType.ROLE); + role.setUrn(association.getUrn().toString()); + roleAssociation.setRole(role); + roleAssociation.setAssociatedUrn(entityUrn.toString()); + return roleAssociation; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index 3cb0ec942a4576..80337cd9a53388 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.Actor; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -15,79 +16,79 @@ import com.linkedin.metadata.key.RoleKey; import com.linkedin.role.Actors; import com.linkedin.role.RoleUserArray; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class RoleMapper implements ModelMapper { - public static final RoleMapper INSTANCE = new RoleMapper(); - - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final RoleMapper INSTANCE = new RoleMapper(); + + public static Role map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { + final RoleProperties propertiesResult = new RoleProperties(); + propertiesResult.setName(e.getName()); + propertiesResult.setDescription(e.getDescription()); + propertiesResult.setType(e.getType()); + propertiesResult.setRequestUrl(e.getRequestUrl()); + + return propertiesResult; + } + + private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { + RoleUser result = new RoleUser(); + CorpUser corpUser = new CorpUser(); + corpUser.setUrn(provisionedUser.getUser().toString()); + result.setUser(corpUser); + return result; + } + + private static Actor mapActor(Actors actors) { + Actor actor = new Actor(); + actor.setUsers(mapRoleUsers(actors.getUsers())); + return actor; + } + + private static List mapRoleUsers(RoleUserArray users) { + if (users == null) { + return null; } + return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + } - private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { - 
final RoleProperties propertiesResult = new RoleProperties(); - propertiesResult.setName(e.getName()); - propertiesResult.setDescription(e.getDescription()); - propertiesResult.setType(e.getType()); - propertiesResult.setRequestUrl(e.getRequestUrl()); + @Override + public Role apply(@Nullable QueryContext context, EntityResponse input) { - return propertiesResult; - } + final Role result = new Role(); + final Urn entityUrn = input.getUrn(); - private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { - RoleUser result = new RoleUser(); - CorpUser corpUser = new CorpUser(); - corpUser.setUrn(provisionedUser.getUser().toString()); - result.setUser(corpUser); - return result; - } + result.setUrn(entityUrn.toString()); + result.setType(EntityType.ROLE); - private static Actor mapActor(Actors actors) { - Actor actor = new Actor(); - actor.setUsers(mapRoleUsers(actors.getUsers())); - return actor; - } + final EnvelopedAspectMap aspects = input.getAspects(); - private static List mapRoleUsers(RoleUserArray users) { - if (users == null) { - return null; - } - return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); + if (roleKeyAspect != null) { + result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); + } + final EnvelopedAspect envelopedPropertiesAspect = + aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); + if (envelopedPropertiesAspect != null) { + result.setProperties( + mapRoleProperties( + new com.linkedin.role.RoleProperties(envelopedPropertiesAspect.getValue().data()))); } - @Override - public Role apply(EntityResponse input) { - - - final Role result = new Role(); - final Urn entityUrn = input.getUrn(); - - result.setUrn(entityUrn.toString()); - result.setType(EntityType.ROLE); - - final EnvelopedAspectMap aspects = input.getAspects(); - - final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); - if 
(roleKeyAspect != null) { - result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); - } - final EnvelopedAspect envelopedPropertiesAspect = aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); - if (envelopedPropertiesAspect != null) { - result.setProperties(mapRoleProperties( - new com.linkedin.role.RoleProperties( - envelopedPropertiesAspect.getValue().data())) - ); - } - - final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); - if (envelopedUsers != null) { - result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); - } - - return result; + final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); + if (envelopedUsers != null) { + result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java new file mode 100644 index 00000000000000..b1f27357d45504 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java @@ -0,0 +1,70 @@ +package com.linkedin.datahub.graphql.types.schemafield; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.businessattribute.BusinessAttributes; +import com.linkedin.common.Documentation; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; +import com.linkedin.datahub.graphql.types.businessattribute.mappers.BusinessAttributesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.DocumentationMapper; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import 
com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.structuredproperty.StructuredPropertiesMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class SchemaFieldMapper implements ModelMapper { + + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + + public static SchemaFieldEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public SchemaFieldEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + Urn entityUrn = entityResponse.getUrn(); + final SchemaFieldEntity result = this.mapSchemaFieldUrn(context, entityUrn); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTIES_ASPECT_NAME, + ((schemaField, dataMap) -> + schemaField.setStructuredProperties( + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); + mappingHelper.mapToResult( + BUSINESS_ATTRIBUTE_ASPECT, + (((schemaField, dataMap) -> + schemaField.setBusinessAttributes( + BusinessAttributesMapper.map(new BusinessAttributes(dataMap), entityUrn))))); + mappingHelper.mapToResult( + DOCUMENTATION_ASPECT_NAME, + (entity, dataMap) -> + entity.setDocumentation(DocumentationMapper.map(context, new Documentation(dataMap)))); + return result; + } + + private SchemaFieldEntity mapSchemaFieldUrn(@Nullable QueryContext context, Urn urn) { + try { + SchemaFieldEntity result = new SchemaFieldEntity(); + result.setUrn(urn.toString()); + result.setType(EntityType.SCHEMA_FIELD); + 
result.setFieldPath(urn.getEntityKey().get(1)); + Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); + result.setParent(UrnToEntityMapper.map(context, parentUrn)); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to load schemaField entity", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index 748753c4e22b13..2fa26d8cf2cdd7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -1,22 +1,40 @@ package com.linkedin.datahub.graphql.types.schemafield; +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; -import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; -public class SchemaFieldType implements 
com.linkedin.datahub.graphql.types.EntityType { +@RequiredArgsConstructor +public class SchemaFieldType + implements com.linkedin.datahub.graphql.types.EntityType { - public SchemaFieldType() { } + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + STRUCTURED_PROPERTIES_ASPECT_NAME, BUSINESS_ATTRIBUTE_ASPECT, DOCUMENTATION_ASPECT_NAME); + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; @Override public EntityType type() { @@ -34,38 +52,47 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List schemaFieldUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List schemaFieldUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - return schemaFieldUrns.stream() - .map(this::mapSchemaFieldUrn) - .map(schemaFieldEntity -> DataFetcherResult.newResult() - .data(schemaFieldEntity) - .build() - ) - .collect(Collectors.toList()); + Map entities = new HashMap<>(); + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + entities = + _entityClient.batchGetV2( + context.getOperationContext(), + SCHEMA_FIELD_ENTITY_NAME, + new HashSet<>(schemaFieldUrns), + ASPECTS_TO_FETCH); + } - } catch (Exception e) { - throw new RuntimeException("Failed to load schemaField entity", e); - } - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : schemaFieldUrns) { + if (_featureFlags.isSchemaFieldEntityFetchEnabled()) { + gmsResults.add(entities.getOrDefault(urn, null)); + } else { + gmsResults.add( + new EntityResponse() + .setUrn(urn) + .setAspects(new EnvelopedAspectMap()) + .setEntityName(urn.getEntityType())); + } + } + + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(SchemaFieldMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); - private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { - try { - SchemaFieldEntity result = new SchemaFieldEntity(); - result.setUrn(urn.toString()); - result.setType(EntityType.SCHEMA_FIELD); - result.setFieldPath(urn.getEntityKey().get(1)); - Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(parentUrn)); - return result; } catch (Exception e) { throw new RuntimeException("Failed to load schemaField entity", e); } } - } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java new file mode 100644 index 00000000000000..dc1ff7ca329714 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java @@ -0,0 +1,85 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyValue; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertiesEntry; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import 
javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StructuredPropertiesMapper { + + public static final StructuredPropertiesMapper INSTANCE = new StructuredPropertiesMapper(); + + public static com.linkedin.datahub.graphql.generated.StructuredProperties map( + @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { + return INSTANCE.apply(context, structuredProperties); + } + + public com.linkedin.datahub.graphql.generated.StructuredProperties apply( + @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { + com.linkedin.datahub.graphql.generated.StructuredProperties result = + new com.linkedin.datahub.graphql.generated.StructuredProperties(); + result.setProperties( + structuredProperties.getProperties().stream() + .map(p -> mapStructuredProperty(context, p)) + .collect(Collectors.toList())); + return result; + } + + private StructuredPropertiesEntry mapStructuredProperty( + @Nullable QueryContext context, StructuredPropertyValueAssignment valueAssignment) { + StructuredPropertiesEntry entry = new StructuredPropertiesEntry(); + entry.setStructuredProperty(createStructuredPropertyEntity(valueAssignment)); + final List values = new ArrayList<>(); + final List entities = new ArrayList<>(); + valueAssignment + .getValues() + .forEach( + value -> { + if (value.isString()) { + this.mapStringValue(context, value.getString(), values, entities); + } else if (value.isDouble()) { + values.add(new NumberValue(value.getDouble())); + } + }); + entry.setValues(values); + entry.setValueEntities(entities); + return entry; + } + + private StructuredPropertyEntity createStructuredPropertyEntity( + StructuredPropertyValueAssignment assignment) { + StructuredPropertyEntity entity = new StructuredPropertyEntity(); + entity.setUrn(assignment.getPropertyUrn().toString()); + entity.setType(EntityType.STRUCTURED_PROPERTY); + return entity; + } + + 
private static void mapStringValue( + @Nullable QueryContext context, + String stringValue, + List values, + List entities) { + try { + final Urn urnValue = Urn.createFromString(stringValue); + entities.add(UrnToEntityMapper.map(context, urnValue)); + } catch (Exception e) { + log.debug("String value is not an urn for this structured property entry"); + } + values.add(new StringValue(stringValue)); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java new file mode 100644 index 00000000000000..ff54131506a7cc --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java @@ -0,0 +1,129 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.StringArrayMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AllowedValue; +import com.linkedin.datahub.graphql.generated.DataTypeEntity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.EntityTypeEntity; +import com.linkedin.datahub.graphql.generated.NumberValue; +import com.linkedin.datahub.graphql.generated.PropertyCardinality; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertyDefinition; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.TypeQualifier; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import 
com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.structured.PropertyValueArray; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +public class StructuredPropertyMapper + implements ModelMapper { + + private static final String ALLOWED_TYPES = "allowedTypes"; + + public static final StructuredPropertyMapper INSTANCE = new StructuredPropertyMapper(); + + public static StructuredPropertyEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } + + @Override + public StructuredPropertyEntity apply( + @Nullable QueryContext queryContext, @Nonnull final EntityResponse entityResponse) { + final StructuredPropertyEntity result = new StructuredPropertyEntity(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.STRUCTURED_PROPERTY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME, (this::mapStructuredPropertyDefinition)); + return mappingHelper.getResult(); + } + + private void mapStructuredPropertyDefinition( + @Nonnull StructuredPropertyEntity extendedProperty, @Nonnull DataMap dataMap) { + com.linkedin.structured.StructuredPropertyDefinition gmsDefinition = + new com.linkedin.structured.StructuredPropertyDefinition(dataMap); + StructuredPropertyDefinition definition = new StructuredPropertyDefinition(); + definition.setQualifiedName(gmsDefinition.getQualifiedName()); + definition.setCardinality( + PropertyCardinality.valueOf(gmsDefinition.getCardinality().toString())); + definition.setImmutable(gmsDefinition.isImmutable()); + definition.setValueType(createDataTypeEntity(gmsDefinition.getValueType())); + if 
(gmsDefinition.hasDisplayName()) { + definition.setDisplayName(gmsDefinition.getDisplayName()); + } + if (gmsDefinition.getDescription() != null) { + definition.setDescription(gmsDefinition.getDescription()); + } + if (gmsDefinition.hasAllowedValues()) { + definition.setAllowedValues(mapAllowedValues(gmsDefinition.getAllowedValues())); + } + if (gmsDefinition.hasTypeQualifier()) { + definition.setTypeQualifier(mapTypeQualifier(gmsDefinition.getTypeQualifier())); + } + definition.setEntityTypes( + gmsDefinition.getEntityTypes().stream() + .map(this::createEntityTypeEntity) + .collect(Collectors.toList())); + extendedProperty.setDefinition(definition); + } + + private List mapAllowedValues(@Nonnull PropertyValueArray gmsValues) { + List allowedValues = new ArrayList<>(); + gmsValues.forEach( + value -> { + final AllowedValue allowedValue = new AllowedValue(); + if (value.getValue().isString()) { + allowedValue.setValue(new StringValue(value.getValue().getString())); + } else if (value.getValue().isDouble()) { + allowedValue.setValue(new NumberValue(value.getValue().getDouble())); + } + if (value.getDescription() != null) { + allowedValue.setDescription(value.getDescription()); + } + allowedValues.add(allowedValue); + }); + return allowedValues; + } + + private DataTypeEntity createDataTypeEntity(final Urn dataTypeUrn) { + final DataTypeEntity dataType = new DataTypeEntity(); + dataType.setUrn(dataTypeUrn.toString()); + dataType.setType(EntityType.DATA_TYPE); + return dataType; + } + + private TypeQualifier mapTypeQualifier(final StringArrayMap gmsTypeQualifier) { + final TypeQualifier typeQualifier = new TypeQualifier(); + List allowedTypes = gmsTypeQualifier.get(ALLOWED_TYPES); + if (allowedTypes != null) { + typeQualifier.setAllowedTypes( + allowedTypes.stream().map(this::createEntityTypeEntity).collect(Collectors.toList())); + } + return typeQualifier; + } + + private EntityTypeEntity createEntityTypeEntity(final Urn entityTypeUrn) { + return 
createEntityTypeEntity(entityTypeUrn.toString()); + } + + private EntityTypeEntity createEntityTypeEntity(final String entityTypeUrnStr) { + final EntityTypeEntity entityType = new EntityTypeEntity(); + entityType.setUrn(entityTypeUrnStr); + entityType.setType(EntityType.ENTITY_TYPE); + return entityType; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java new file mode 100644 index 00000000000000..22e161d320f215 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java @@ -0,0 +1,79 @@ +package com.linkedin.datahub.graphql.types.structuredproperty; + +import static com.linkedin.metadata.Constants.*; + +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import graphql.execution.DataFetcherResult; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; + +@RequiredArgsConstructor +public class StructuredPropertyType + implements com.linkedin.datahub.graphql.types.EntityType { + + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME); + private final EntityClient _entityClient; + + @Override + public EntityType type() { + 
return EntityType.STRUCTURED_PROPERTY; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return StructuredPropertyEntity.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List extendedPropertyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + STRUCTURED_PROPERTY_ENTITY_NAME, + new HashSet<>(extendedPropertyUrns), + ASPECTS_TO_FETCH); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : extendedPropertyUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(StructuredPropertyMapper.map(context, gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Queries", e); + } + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index f79b23033c9958..3c07b242e9d813 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.types.tag; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.Entity; @@ -27,7 +29,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -44,136 +45,151 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class TagType implements com.linkedin.datahub.graphql.types.SearchableEntityType, - MutableType { - - private static final Set FACET_FIELDS = Collections.emptySet(); - - private final EntityClient _entityClient; - - public TagType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return Tag.class; - } - - @Override - public EntityType type() { - return EntityType.TAG; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class inputClass() { - return TagUpdateInput.class; +public class TagType + implements com.linkedin.datahub.graphql.types.SearchableEntityType, + MutableType { + + private static final Set FACET_FIELDS = Collections.emptySet(); + + private final EntityClient _entityClient; + + public TagType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return Tag.class; + } + + @Override + public EntityType type() { + return EntityType.TAG; + } + + @Override + public Function 
getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class inputClass() { + return TagUpdateInput.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + + final List tagUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map tagMap = + _entityClient.batchGetV2( + context.getOperationContext(), TAG_ENTITY_NAME, new HashSet<>(tagUrns), null); + + final List gmsResults = new ArrayList<>(urns.size()); + for (Urn urn : tagUrns) { + gmsResults.add(tagMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsTag -> + gmsTag == null + ? null + : DataFetcherResult.newResult() + .data(TagMapper.map(context, gmsTag)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Tags", e); } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - - final List tagUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map tagMap = _entityClient.batchGetV2(TAG_ENTITY_NAME, new HashSet<>(tagUrns), - null, context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : tagUrns) { - gmsResults.add(tagMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsTag -> gmsTag == null ? 
null - : DataFetcherResult.newResult() - .data(TagMapper.map(gmsTag)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Tags", e); - } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), + "tag", + query, + facetFilters, + start, + count); + return UrnSearchResultsMapper.map(context, searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(context.getOperationContext(), "tag", query, filters, limit); + return AutoCompleteResultsMapper.map(context, result); + } + + @Override + public Tag update( + @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(input, context)) { + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + TagUpdateInputMapper.map(context, input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + try { + _entityClient.batchIngestProposals(context.getOperationContext(), proposals, false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map 
facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("tag", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.TAG_PRIVILEGES.getResourceType(), + update.getUrn(), + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - - @Override - public Tag update(@Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = 
TagUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.TAG_PRIVILEGES.getResourceType(), - update.getUrn(), - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDescription() != null || updateInput.getName() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (updateInput.getDescription() != null || updateInput.getName() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index f4d5f0a549a0ed..cadeef99410345 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -3,36 +3,49 @@ import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Tag; - -import javax.annotation.Nonnull; +import java.util.Optional; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GlobalTagsMapper { - public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); + public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); + + public static com.linkedin.datahub.graphql.generated.GlobalTags map( + @Nullable final QueryContext context, + @Nonnull final GlobalTags standardTags, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, standardTags, entityUrn); + } - public static com.linkedin.datahub.graphql.generated.GlobalTags 
map( - @Nonnull final GlobalTags standardTags, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(standardTags, entityUrn); - } + public com.linkedin.datahub.graphql.generated.GlobalTags apply( + @Nullable final QueryContext context, + @Nonnull final GlobalTags input, + @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlobalTags result = + new com.linkedin.datahub.graphql.generated.GlobalTags(); + result.setTags( + input.getTags().stream() + .map(tag -> mapTagAssociation(context, tag, entityUrn)) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList())); + return result; + } - public com.linkedin.datahub.graphql.generated.GlobalTags apply(@Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); - result.setTags(input.getTags().stream().map(tag -> this.mapTagAssociation(tag, entityUrn)).collect(Collectors.toList())); - return result; - } + private static Optional mapTagAssociation( + @Nullable final QueryContext context, + @Nonnull final TagAssociation input, + @Nonnull final Urn entityUrn) { - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); - final Tag resultTag = new Tag(); - resultTag.setUrn(input.getTag().toString()); - result.setTag(resultTag); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + final com.linkedin.datahub.graphql.generated.TagAssociation result = + new com.linkedin.datahub.graphql.generated.TagAssociation(); + final Tag resultTag = new Tag(); + resultTag.setUrn(input.getTag().toString()); + result.setTag(resultTag); + result.setAssociatedUrn(entityUrn.toString()); + return 
Optional.of(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 775c123070a80d..cb024fd6953f25 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -2,29 +2,35 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.TagUrn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class TagAssociationUpdateMapper implements ModelMapper { +public class TagAssociationUpdateMapper + implements ModelMapper { - public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); + public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); - } + public static TagAssociation map( + @Nullable final QueryContext context, + @Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(context, tagAssociationUpdate); + } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { - final TagAssociation output = new TagAssociation(); - try { - output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to update tag with urn %s, invalid urn", - 
tagAssociationUpdate.getTag().getUrn())); - } - return output; + public TagAssociation apply( + @Nullable final QueryContext context, final TagAssociationUpdate tagAssociationUpdate) { + final TagAssociation output = new TagAssociation(); + try { + output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to update tag with urn %s, invalid urn", + tagAssociationUpdate.getTag().getUrn())); } - + return output; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index 43736b412b0045..d7971d1788c037 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; @@ -15,64 +18,66 @@ import com.linkedin.metadata.key.TagKey; import com.linkedin.tag.TagProperties; import javax.annotation.Nonnull; - -import static com.linkedin.metadata.Constants.*; - +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class TagMapper implements ModelMapper { - public static final TagMapper INSTANCE = new TagMapper(); + public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { - final Tag result = new Tag(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.TAG); + public static Tag map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); + } - final String legacyName = entityResponse.getUrn().getId(); - result.setName(legacyName); + @Override + public Tag apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + final Tag result = new Tag(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.TAG); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (tag, dataMap) -> - tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + final String legacyName = entityResponse.getUrn().getId(); + result.setName(legacyName); - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, TagMapper::mapTagKey); + 
mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, TagMapper::mapTagProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (tag, dataMap) -> + tag.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); - return mappingHelper.getResult(); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - TagKey tagKey = new TagKey(dataMap); - tag.setName(tagKey.getName()); - } + return mappingHelper.getResult(); + } + + private static void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + TagKey tagKey = new TagKey(dataMap); + tag.setName(tagKey.getName()); + } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - final TagProperties properties = new TagProperties(dataMap); - final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = - new com.linkedin.datahub.graphql.generated.TagProperties.Builder() - .setColorHex(properties.getColorHex(GetMode.DEFAULT)) - .setName(properties.getName(GetMode.DEFAULT)) - .setDescription(properties.getDescription(GetMode.DEFAULT)) - .build(); - tag.setProperties(graphQlProperties); - // Set deprecated top-level description field. - if (properties.hasDescription()) { - tag.setDescription(properties.getDescription()); - } + private static void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + final TagProperties properties = new TagProperties(dataMap); + final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = + new com.linkedin.datahub.graphql.generated.TagProperties.Builder() + .setColorHex(properties.getColorHex(GetMode.DEFAULT)) + .setName(properties.getName(GetMode.DEFAULT)) + .setDescription(properties.getDescription(GetMode.DEFAULT)) + .build(); + tag.setProperties(graphQlProperties); + // Set deprecated top-level description field. 
+ if (properties.hasDescription()) { + tag.setDescription(properties.getDescription()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 505dd0d36954b3..7e6b7052d683d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; @@ -10,6 +13,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.TagUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; @@ -18,23 +22,23 @@ import java.util.ArrayList; import java.util.Collection; import javax.annotation.Nonnull; +import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class TagUpdateInputMapper implements InputModelMapper, Urn> { +public class TagUpdateInputMapper + implements InputModelMapper, Urn> { public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection map( + @Nullable final QueryContext context, @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { - return INSTANCE.apply(tagUpdate, 
actor); + return INSTANCE.apply(context, tagUpdate, actor); } @Override public Collection apply( + @Nullable final QueryContext context, @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(2); @@ -59,9 +63,10 @@ public Collection apply( TagProperties tagProperties = new TagProperties(); tagProperties.setName(tagUpdate.getName()); tagProperties.setDescription(tagUpdate.getDescription()); - proposals.add(updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); } return proposals; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java index ddc9f33b25516f..be67d174219178 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java @@ -1,15 +1,14 @@ package com.linkedin.datahub.graphql.types.test; -import com.linkedin.datahub.graphql.generated.TestDefinition; -import com.linkedin.test.TestInfo; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; +import com.linkedin.datahub.graphql.generated.TestDefinition; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; - +import com.linkedin.test.TestInfo; public class TestMapper { @@ -29,12 +28,11 @@ public static Test map(final EntityResponse entityResponse) { result.setName(testInfo.getName()); result.setDescription(testInfo.getDescription()); 
result.setDefinition(new TestDefinition(testInfo.getDefinition().getJson())); - } else { + } else { return null; } return result; } - private TestMapper() { - } -} \ No newline at end of file + private TestMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index 4b7df8a0d23d36..ae8202cf378cec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -3,9 +3,9 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -20,15 +20,12 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class TestType implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.TEST_INFO_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = ImmutableSet.of(Constants.TEST_INFO_ASPECT_NAME); private final EntityClient _entityClient; - public TestType(final EntityClient entityClient) { + public TestType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -48,28 +45,28 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List testUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext 
context) throws Exception { + final List testUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.TEST_ENTITY_NAME, - new HashSet<>(testUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + context.getOperationContext(), + Constants.TEST_ENTITY_NAME, + new HashSet<>(testUrns), + ASPECTS_TO_FETCH); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : testUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(TestMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult().data(TestMapper.map(gmsResult)).build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Tests", e); @@ -83,4 +80,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java index 7812282d0c1e52..02de39ffc644c0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ChangeOperationType; import 
com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; @@ -25,15 +27,14 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame structs for every schema +// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame +// structs for every schema // at every semantic version. @Slf4j public class SchemaBlameMapper { - public static GetSchemaBlameResult map(@Nonnull final List changeTransactions, + public static GetSchemaBlameResult map( + @Nonnull final List changeTransactions, @Nullable final String versionCutoff) { final GetSchemaBlameResult result = new GetSchemaBlameResult(); if (changeTransactions.isEmpty()) { @@ -46,7 +47,8 @@ public static GetSchemaBlameResult map(@Nonnull final List ch final String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - final String semanticVersionFilterString = versionCutoff == null ? latestSemanticVersionString : versionCutoff; + final String semanticVersionFilterString = + versionCutoff == null ? 
latestSemanticVersionString : versionCutoff; final Optional semanticVersionFilterOptional = createSemanticVersion(semanticVersionFilterString); if (semanticVersionFilterOptional.isEmpty()) { @@ -55,25 +57,30 @@ public static GetSchemaBlameResult map(@Nonnull final List ch final ComparableVersion semanticVersionFilter = semanticVersionFilterOptional.get(); - final List reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .filter(semanticVersionChangeTransactionPair -> - semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) <= 0) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + final List reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .filter( + semanticVersionChangeTransactionPair -> + semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) + <= 0) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); if (reversedChangeTransactions.isEmpty()) { return result; } - final String selectedSemanticVersion = truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); + final String selectedSemanticVersion = + truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); final long selectedSemanticVersionTimestamp = reversedChangeTransactions.get(0).getTimestamp(); final String selectedVersionStamp = reversedChangeTransactions.get(0).getVersionStamp(); result.setVersion( - new SemanticVersionStruct(selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); + new SemanticVersionStruct( + selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); for 
(ChangeTransaction changeTransaction : reversedChangeTransactions) { for (ChangeEvent changeEvent : changeTransaction.getChangeEvents()) { @@ -90,8 +97,10 @@ public static GetSchemaBlameResult map(@Nonnull final List ch SchemaFieldKey schemaFieldKey; try { - schemaFieldKey = (SchemaFieldKey) EntityKeyUtils.convertUrnToEntityKeyInternal(Urn.createFromString(schemaUrn), - new SchemaFieldKey().schema()); + schemaFieldKey = + (SchemaFieldKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + Urn.createFromString(schemaUrn), new SchemaFieldKey().schema()); } catch (Exception e) { log.debug(String.format("Could not generate schema urn for %s", schemaUrn)); continue; @@ -101,7 +110,10 @@ public static GetSchemaBlameResult map(@Nonnull final List ch schemaFieldBlame.setFieldPath(fieldPath); final SchemaFieldChange schemaFieldChange = - getLastSchemaFieldChange(changeEvent, changeTransaction.getTimestamp(), changeTransaction.getSemVer(), + getLastSchemaFieldChange( + changeEvent, + changeTransaction.getTimestamp(), + changeTransaction.getSemVer(), changeTransaction.getVersionStamp()); schemaFieldBlame.setSchemaFieldChange(schemaFieldChange); @@ -109,15 +121,17 @@ public static GetSchemaBlameResult map(@Nonnull final List ch } } - result.setSchemaFieldBlameList(schemaBlameMap.values() - .stream() - .filter(schemaFieldBlame -> !schemaFieldBlame.getSchemaFieldChange() - .getChangeType() - .equals(ChangeOperationType.REMOVE)) - .collect(Collectors.toList())); + result.setSchemaFieldBlameList( + schemaBlameMap.values().stream() + .filter( + schemaFieldBlame -> + !schemaFieldBlame + .getSchemaFieldChange() + .getChangeType() + .equals(ChangeOperationType.REMOVE)) + .collect(Collectors.toList())); return result; } - private SchemaBlameMapper() { - } -} \ No newline at end of file + private SchemaBlameMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java index 249957b1a12621..295ca0856821c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; import com.linkedin.datahub.graphql.generated.SemanticVersionStruct; import com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils; @@ -12,10 +14,8 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to list of schema versions. +// Class for converting ChangeTransactions received from the Timeline API to list of schema +// versions. 
@Slf4j public class SchemaVersionListMapper { @@ -29,28 +29,36 @@ public static GetSchemaVersionListResult map(List changeTrans String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - long latestSemanticVersionTimestamp = changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); - String latestVersionStamp = changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); + long latestSemanticVersionTimestamp = + changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); + String latestVersionStamp = + changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); result.setLatestVersion( - new SemanticVersionStruct(latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); + new SemanticVersionStruct( + latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); - List reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + List reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); - List semanticVersionStructList = reversedChangeTransactions.stream() - .map(changeTransaction -> new SemanticVersionStruct(truncateSemanticVersion(changeTransaction.getSemVer()), - changeTransaction.getTimestamp(), changeTransaction.getVersionStamp())) - .collect(Collectors.toList()); + List semanticVersionStructList = + reversedChangeTransactions.stream() + .map( + changeTransaction -> + new SemanticVersionStruct( + 
truncateSemanticVersion(changeTransaction.getSemVer()), + changeTransaction.getTimestamp(), + changeTransaction.getVersionStamp())) + .collect(Collectors.toList()); result.setSemanticVersionList(semanticVersionStructList); return result; } - private SchemaVersionListMapper() { - } -} \ No newline at end of file + private SchemaVersionListMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java index 175cf678117f01..37acfe3da0f9f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java @@ -9,13 +9,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; - @Slf4j public class TimelineUtils { - public static Optional> semanticVersionChangeTransactionPair( - ChangeTransaction changeTransaction) { - Optional semanticVersion = createSemanticVersion(changeTransaction.getSemVer()); + public static Optional> + semanticVersionChangeTransactionPair(ChangeTransaction changeTransaction) { + Optional semanticVersion = + createSemanticVersion(changeTransaction.getSemVer()); return semanticVersion.map(version -> Pair.of(version, changeTransaction)); } @@ -29,21 +29,24 @@ public static Optional createSemanticVersion(String semanticV } } - // The SemanticVersion is currently returned from the ChangeTransactions in the format "x.y.z-computed". This function + // The SemanticVersion is currently returned from the ChangeTransactions in the format + // "x.y.z-computed". This function // removes the suffix "computed". public static String truncateSemanticVersion(String semanticVersion) { String suffix = "-computed"; - return semanticVersion.endsWith(suffix) ? 
semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + return semanticVersion.endsWith(suffix) + ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; } - public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent, long timestamp, - String semanticVersion, String versionStamp) { + public static SchemaFieldChange getLastSchemaFieldChange( + ChangeEvent changeEvent, long timestamp, String semanticVersion, String versionStamp) { SchemaFieldChange schemaFieldChange = new SchemaFieldChange(); schemaFieldChange.setTimestampMillis(timestamp); schemaFieldChange.setLastSemanticVersion(truncateSemanticVersion(semanticVersion)); schemaFieldChange.setChangeType( - ChangeOperationType.valueOf(ChangeOperationType.class, changeEvent.getOperation().toString())); + ChangeOperationType.valueOf( + ChangeOperationType.class, changeEvent.getOperation().toString())); schemaFieldChange.setVersionStamp(versionStamp); String translatedChangeOperationType; @@ -65,15 +68,16 @@ public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent String suffix = "-computed"; String translatedSemanticVersion = - semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + semanticVersion.endsWith(suffix) + ? 
semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; - String lastSchemaFieldChange = String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); + String lastSchemaFieldChange = + String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); schemaFieldChange.setLastSchemaFieldChange(lastSchemaFieldChange); return schemaFieldChange; } - private TimelineUtils() { - } + private TimelineUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java index 3bf84d21a32158..1bfeeaeea7c36f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java @@ -1,20 +1,26 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.FieldUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; - -public class FieldUsageCountsMapper implements ModelMapper { +public class FieldUsageCountsMapper + implements ModelMapper { public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper(); - public static FieldUsageCounts map(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { - return INSTANCE.apply(usageCounts); + public static FieldUsageCounts map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + return INSTANCE.apply(context, usageCounts); } @Override - public FieldUsageCounts apply(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + public FieldUsageCounts apply( + @Nullable QueryContext 
context, + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { FieldUsageCounts result = new FieldUsageCounts(); result.setCount(usageCounts.getCount()); result.setFieldName(usageCounts.getFieldName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java index 453ae97d403067..32ba8f5b80325a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java @@ -1,22 +1,27 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageAggregation; import com.linkedin.datahub.graphql.generated.WindowDuration; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; - -public class UsageAggregationMapper implements - ModelMapper { +public class UsageAggregationMapper + implements ModelMapper { public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper(); - public static UsageAggregation map(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { - return INSTANCE.apply(pdlUsageAggregation); + public static UsageAggregation map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + return INSTANCE.apply(context, pdlUsageAggregation); } @Override - public UsageAggregation apply(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public UsageAggregation apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { UsageAggregation result = new UsageAggregation(); 
result.setBucket(pdlUsageAggregation.getBucket()); @@ -27,7 +32,8 @@ public UsageAggregation apply(@Nonnull final com.linkedin.usage.UsageAggregation result.setResource(pdlUsageAggregation.getResource().toString()); } if (pdlUsageAggregation.hasMetrics()) { - result.setMetrics(UsageAggregationMetricsMapper.map(pdlUsageAggregation.getMetrics())); + result.setMetrics( + UsageAggregationMetricsMapper.map(context, pdlUsageAggregation.getMetrics())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java index 697b15d57e4e48..47411d65c73290 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java @@ -1,35 +1,42 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageAggregationMetrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; - -public class UsageAggregationMetricsMapper implements - ModelMapper { +public class UsageAggregationMetricsMapper + implements ModelMapper { public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper(); - public static UsageAggregationMetrics map(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { - return INSTANCE.apply(usageAggregationMetrics); + public static UsageAggregationMetrics map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + return INSTANCE.apply(context, usageAggregationMetrics); } @Override 
- public UsageAggregationMetrics apply(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + public UsageAggregationMetrics apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { UsageAggregationMetrics result = new UsageAggregationMetrics(); result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries()); result.setUniqueUserCount(usageAggregationMetrics.getUniqueUserCount()); result.setTopSqlQueries(usageAggregationMetrics.getTopSqlQueries()); if (usageAggregationMetrics.hasFields()) { result.setFields( - usageAggregationMetrics.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + usageAggregationMetrics.getFields().stream() + .map(f -> FieldUsageCountsMapper.map(context, f)) + .collect(Collectors.toList())); } if (usageAggregationMetrics.hasUsers()) { - result.setUsers(usageAggregationMetrics.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + usageAggregationMetrics.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(context, aggregation)) + .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java index ba3b86b72af8b9..c40126ca325515 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java @@ -1,34 +1,43 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +public class UsageQueryResultAggregationMapper + implements ModelMapper< + com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> { -public class UsageQueryResultAggregationMapper implements - ModelMapper { + public static final UsageQueryResultAggregationMapper INSTANCE = + new UsageQueryResultAggregationMapper(); - public static final UsageQueryResultAggregationMapper INSTANCE = new UsageQueryResultAggregationMapper(); - - public static UsageQueryResultAggregations map(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { - return INSTANCE.apply(pdlUsageResultAggregations); + public static UsageQueryResultAggregations map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + return INSTANCE.apply(context, pdlUsageResultAggregations); } @Override - public UsageQueryResultAggregations apply(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public UsageQueryResultAggregations apply( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { UsageQueryResultAggregations result = new UsageQueryResultAggregations(); result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries()); result.setUniqueUserCount(pdlUsageResultAggregations.getUniqueUserCount()); if (pdlUsageResultAggregations.hasFields()) { result.setFields( - pdlUsageResultAggregations.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + pdlUsageResultAggregations.getFields().stream() + .map(f -> FieldUsageCountsMapper.map(context, f)) + .collect(Collectors.toList())); } if (pdlUsageResultAggregations.hasUsers()) { - 
result.setUsers(pdlUsageResultAggregations.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + pdlUsageResultAggregations.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(context, aggregation)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index f54259180c7392..eef476959c5fec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,29 +1,42 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageQueryResult; - +import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +public class UsageQueryResultMapper + implements ModelMapper { -public class UsageQueryResultMapper implements ModelMapper { + public static final UsageQueryResult EMPTY = + new UsageQueryResult(List.of(), new UsageQueryResultAggregations(0, List.of(), List.of(), 0)); public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); - public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { - return INSTANCE.apply(pdlUsageResult); + public static UsageQueryResult map( + @Nullable final QueryContext context, + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + return INSTANCE.apply(context, pdlUsageResult); } 
@Override - public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + public UsageQueryResult apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { UsageQueryResult result = new UsageQueryResult(); if (pdlUsageResult.hasAggregations()) { - result.setAggregations(UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); + result.setAggregations( + UsageQueryResultAggregationMapper.map(context, pdlUsageResult.getAggregations())); } if (pdlUsageResult.hasBuckets()) { - result.setBuckets(pdlUsageResult.getBuckets().stream().map( - bucket -> UsageAggregationMapper.map(bucket)).collect(Collectors.toList())); + result.setBuckets( + pdlUsageResult.getBuckets().stream() + .map(bucket -> UsageAggregationMapper.map(context, bucket)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java index b525a761841e30..783d44d4863689 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; - import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; - -public class UserUsageCountsMapper implements - ModelMapper { +public class UserUsageCountsMapper + implements ModelMapper { public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper(); - public static UserUsageCounts 
map(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { - return INSTANCE.apply(pdlUsageResultAggregations); + public static UserUsageCounts map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + return INSTANCE.apply(context, pdlUsageResultAggregations); } @Override - public UserUsageCounts apply(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public UserUsageCounts apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { UserUsageCounts result = new UserUsageCounts(); if (pdlUsageResultAggregations.hasUser()) { CorpUser partialUser = new CorpUser(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index f6c348937c7a55..be27f9b0f3c011 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewDefinition; import com.linkedin.datahub.graphql.generated.DataHubViewFilter; @@ -9,8 +12,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilter; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import 
com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; @@ -24,20 +27,20 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - @Slf4j public class DataHubViewMapper implements ModelMapper { private static final String KEYWORD_FILTER_SUFFIX = ".keyword"; public static final DataHubViewMapper INSTANCE = new DataHubViewMapper(); - public static DataHubView map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubView map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubView apply(@Nonnull final EntityResponse entityResponse) { + public DataHubView apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataHubView result = new DataHubView(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATAHUB_VIEW); @@ -57,20 +60,26 @@ private void mapDataHubViewInfo(@Nonnull final DataHubView view, @Nonnull final } @Nonnull - private DataHubViewDefinition mapViewDefinition(@Nonnull final com.linkedin.view.DataHubViewDefinition definition) { + private DataHubViewDefinition mapViewDefinition( + @Nonnull final com.linkedin.view.DataHubViewDefinition definition) { final DataHubViewDefinition result = new DataHubViewDefinition(); result.setFilter(mapFilter(definition.getFilter())); - result.setEntityTypes(definition.getEntityTypes().stream().map(EntityTypeMapper::getType).collect( - Collectors.toList())); + result.setEntityTypes( + definition.getEntityTypes().stream() + .map(EntityTypeMapper::getType) + .collect(Collectors.toList())); return result; } @Nullable - private DataHubViewFilter mapFilter(@Nonnull final 
com.linkedin.metadata.query.filter.Filter filter) { - // This assumes that people DO NOT emit Views on their own, since we expect that the Filter structure is within + private DataHubViewFilter mapFilter( + @Nonnull final com.linkedin.metadata.query.filter.Filter filter) { + // This assumes that people DO NOT emit Views on their own, since we expect that the Filter + // structure is within // a finite set of possibilities. // - // If we find a View that was ingested manually and malformed, then we log that and return a default. + // If we find a View that was ingested manually and malformed, then we log that and return a + // default. final DataHubViewFilter result = new DataHubViewFilter(); if (filter.hasOr() && filter.getOr().size() == 1) { // Then we are looking at an AND with multiple sub conditions. @@ -84,9 +93,7 @@ private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.f return result; } - /** - * This simply converts a List of leaf criterion into the FacetFiler equivalent. - */ + /** This simply converts a List of leaf criterion into the FacetFiler equivalent. */ @Nonnull private List mapAndFilters(@Nullable final List ands) { // If the array is missing, return empty array. @@ -98,9 +105,9 @@ private List mapAndFilters(@Nullable final List ands) { } /** - * This converts a list of Conjunctive Criterion into a flattened list - * of FacetFilters. This method makes the assumption that WE (our GraphQL API) - * has minted the View and that each or criterion contains at maximum one nested condition. + * This converts a list of Conjunctive Criterion into a flattened list of FacetFilters. This + * method makes the assumption that WE (our GraphQL API) has minted the View and that each or + * criterion contains at maximum one nested condition. 
*/ @Nonnull private List mapOrFilters(@Nullable final List ors) { @@ -109,8 +116,10 @@ private List mapOrFilters(@Nullable final List or.hasAnd() && or.getAnd().size() > 1)) { - log.warn(String.format( - "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", ors)); + log.warn( + String.format( + "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", + ors)); return Collections.emptyList(); } // It is assumed that in this case, the view is a flat list of ORs. Thus, we filter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java index 21a80e3f900d41..520f8fa5f6cc27 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubViewType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubViewType + implements com.linkedin.datahub.graphql.types.EntityType { public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,22 +44,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception 
{ + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List viewUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATAHUB_VIEW_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, - context.getAuthentication()); + _entityClient.batchGetV2( + context.getOperationContext(), + DATAHUB_VIEW_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH); final List gmsResults = new ArrayList<>(); for (Urn urn : viewUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubViewMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubViewMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Views", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java index bb9de5fb96802b..600db4ac04fc58 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java @@ -5,39 +5,41 @@ import org.joda.time.DateTimeConstants; public class DateUtil { - public DateTime getNow() { - return DateTime.now(); - } - - public DateTime getStartOfNextWeek() { - return setTimeToZero(getNow() - .withDayOfWeek(DateTimeConstants.SUNDAY) - .plusDays(1)); - } - - public DateTime getStartOfNextMonth() { - return setTimeToZero(getNow() - .withDayOfMonth(1) - .plusMonths(1)); - } - - public DateTime setTimeToZero(DateTime input) { - return input.withHourOfDay(0) - .withMinuteOfHour(0) - .withSecondOfMinute(0) - .withMillisOfDay(0); - } - - public DateTime 
getTomorrowStart() { - return setTimeToZero(getNow().plusDays(1)); - } - - public DateRange getTrailingWeekDateRange() { - final DateTime todayEnd = getTomorrowStart().minusMillis(1); - final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); - return new DateRange( - String.valueOf(aWeekAgoStart.getMillis()), - String.valueOf(todayEnd.getMillis()) - ); - } + public DateTime getNow() { + return DateTime.now(); + } + + public DateTime getStartOfNextWeek() { + return setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1)); + } + + public DateTime getStartOfThisMonth() { + return setTimeToZero(getNow().withDayOfMonth(1)); + } + + public DateTime getStartOfNextMonth() { + return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1)); + } + + public DateTime setTimeToZero(DateTime input) { + return input.withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0).withMillisOfDay(0); + } + + public DateTime getTomorrowStart() { + return setTimeToZero(getNow().plusDays(1)); + } + + public DateRange getTrailingWeekDateRange() { + final DateTime todayEnd = getTomorrowStart().minusMillis(1); + final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); + return new DateRange( + String.valueOf(aWeekAgoStart.getMillis()), String.valueOf(todayEnd.getMillis())); + } + + public DateRange getTrailingMonthDateRange() { + final DateTime todayEnd = getTomorrowStart().minusMillis(1); + final DateTime aMonthAgoStart = todayEnd.minusMonths(1).plusMillis(1); + return new DateRange( + String.valueOf(aMonthAgoStart.getMillis()), String.valueOf(todayEnd.getMillis())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java index 7f90071c6770cc..904db311d34d06 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java @@ -6,7 +6,6 @@ import java.util.List; import javax.annotation.Nullable; - public class SearchInsightsUtil { public static List getInsightsFromFeatures(@Nullable final DoubleMap features) { @@ -18,5 +17,5 @@ public static List getInsightsFromFeatures(@Nullable final Double return Collections.emptyList(); } - private SearchInsightsUtil() { } + private SearchInsightsUtil() {} } diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 075a3b0fac43bc..262d2384d84ada 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -17,6 +17,11 @@ extend type Query { Requires the 'Manage Global Views' Platform Privilege. """ globalViewsSettings: GlobalViewsSettings + + """ + Fetch the global settings related to the docs propagation feature. + """ + docPropagationSettings: DocPropagationSettings } extend type Mutation { @@ -25,6 +30,11 @@ extend type Mutation { Requires the 'Manage Global Views' Platform Privilege. """ updateGlobalViewsSettings(input: UpdateGlobalViewsSettingsInput!): Boolean! + + """ + Update the doc propagation settings. + """ + updateDocPropagationSettings(input: UpdateDocPropagationSettingsInput!): Boolean! } """ @@ -91,6 +101,11 @@ type PlatformPrivileges { """ manageTokens: Boolean! + """ + Whether the user is able to view Tests + """ + viewTests: Boolean! + """ Whether the user is able to manage Tests """ @@ -130,6 +145,17 @@ type PlatformPrivileges { Whether the user can create and delete posts pinned to the home page. """ manageGlobalAnnouncements: Boolean! + + """ + Whether the user can create Business Attributes. + """ + createBusinessAttributes: Boolean! + + """ + Whether the user can manage Business Attributes. + """ + manageBusinessAttributes: Boolean! 
+ } """ @@ -212,6 +238,16 @@ type VisualConfig { """ faviconUrl: String + """ + Custom app title to show in the browser tab + """ + appTitle: String + + """ + Boolean flag disabling viewing the Business Glossary page for users without the 'Manage Glossaries' privilege + """ + hideGlossary: Boolean + """ Configuration for the queries tab """ @@ -438,9 +474,18 @@ type FeatureFlagsConfig { showBrowseV2: Boolean! """ - Whether we should show CTAs in the UI related to moving to Managed DataHub by Acryl. + Whether browse v2 is platform mode, which means that platforms are displayed instead of entity types at the root. + """ + platformBrowseV2: Boolean! + + """ + Whether we should show CTAs in the UI related to moving to DataHub Cloud by Acryl. """ showAcrylInfo: Boolean! + """ + Whether ERModelRelationship Tables Feature should be shown. + """ + erModelRelationshipFeatureEnabled: Boolean! """ Whether we should show AccessManagement tab in the datahub UI. @@ -452,6 +497,26 @@ type FeatureFlagsConfig { If this is off, Domains appear "flat" again. """ nestedDomainsEnabled: Boolean! + + """ + Whether business attribute entity should be shown + """ + businessAttributeEntityEnabled: Boolean! + + """ + Whether data contracts should be enabled + """ + dataContractsEnabled: Boolean! + + """ + Whether dataset names are editable + """ + editableDatasetNameEnabled: Boolean! + + """ + If turned on, all siblings will be separated with no way to get to a "combined" sibling view + """ + showSeparateSiblings: Boolean! } """ @@ -475,3 +540,23 @@ type GlobalViewsSettings { """ defaultView: String } + +""" +Input required to update doc propagation settings. +""" +input UpdateDocPropagationSettingsInput { + """ + The default doc propagation setting for the platform. + """ + docColumnPropagation: Boolean +} + +""" +Global (platform-level) settings related to the doc propagation feature +""" +type DocPropagationSettings { + """ + The default doc propagation setting for the platform. 
+ """ + docColumnPropagation: Boolean +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/assertions.graphql b/datahub-graphql-core/src/main/resources/assertions.graphql new file mode 100644 index 00000000000000..ff182089ad7ff6 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/assertions.graphql @@ -0,0 +1,1066 @@ +extend type Mutation { + """ + Upsert a Custom Assertion + """ + upsertCustomAssertion( + """ + Urn of custom assertion. If not provided, one will be generated. + """ + urn: String + + """ + Input for upserting a custom assertion. + """ + input: UpsertCustomAssertionInput! + ): Assertion! + + """ + Report result for an assertion + """ + reportAssertionResult( + """ + Urn of custom assertion. + """ + urn: String! + + """ + Input for reporting result of the assertion + """ + result: AssertionResultInput! + ): Boolean! +} + +""" +Input for upserting a Custom Assertion. +""" +input UpsertCustomAssertionInput { + """ + The entity targeted by this assertion. + """ + entityUrn: String! + + """ + The type of the custom assertion. + """ + type: String! + + """ + The description of this assertion. + """ + description: String! + + """ + The dataset field targeted by this assertion, if any. + """ + fieldPath: String + + """ + The external Platform associated with the assertion + """ + platform: PlatformInput! + + """ + Native platform URL of the Assertion + """ + externalUrl: String + + """ + Logic comprising a raw, unstructured assertion. for example - custom SQL query for the assertion. + """ + logic: String + +} + +""" +Input for reporting result of the assertion +""" +input AssertionResultInput { + """ + Optional: Provide a timestamp associated with the run event. If not provided, one will be generated for you based + on the current time. + """ + timestampMillis: Long + + """ + The final result of assertion, e.g. either SUCCESS or FAILURE. + """ + type: AssertionResultType! 
+ + """ + Additional metadata representing about the native results of the assertion. + These will be displayed alongside the result. + It should be used to capture additional context that is useful for the user. + """ + properties: [StringMapEntryInput!] + + """ + Native platform URL of the Assertion Run Event + """ + externalUrl: String + + """ + Error details, if type is ERROR + """ + error: AssertionResultErrorInput +} + +""" +Input for reporting an Error during Assertion Run +""" +input AssertionResultErrorInput { + """ + The type of error encountered + """ + type: AssertionResultErrorType! + + """ + The error message with details of error encountered + """ + message: String! +} +""" +Input representing A Data Platform +""" +input PlatformInput { + """ + Urn of platform + """ + urn: String + + """ + Name of platform + """ + name: String +} + +""" +Defines a schema field, each with a specified path and type. +""" +type SchemaAssertionField { + """ + The standard V1 path of the field within the schema. + """ + path: String! + + """ + The std type of the field + """ + type: SchemaFieldDataType! + + """ + Optional: The specific native or standard type of the field. + """ + nativeType: String +} + +""" +Defines the required compatibility level for the schema assertion to pass. +""" +enum SchemaAssertionCompatibility { + """ + The schema must be exactly the same as the expected schema. + """ + EXACT_MATCH + + """ + The schema must be a superset of the expected schema. + """ + SUPERSET + + """ + The schema must be a subset of the expected schema. + """ + SUBSET +} + +""" +The source of an assertion +""" +enum AssertionSourceType { + """ + The assertion was defined natively on DataHub by a user. + """ + NATIVE + """ + The assertion was defined and managed externally of DataHub. + """ + EXTERNAL + """ + The assertion was inferred, e.g. from offline AI / ML models. 
+ """ + INFERRED +} + +""" +The type of an Freshness assertion +""" +enum FreshnessAssertionType { + """ + An assertion defined against a Dataset Change Operation - insert, update, delete, etc + """ + DATASET_CHANGE + """ + An assertion defined against a Data Job run + """ + DATA_JOB_RUN +} + +extend type AssertionInfo { + """ + Information about an Freshness Assertion + """ + freshnessAssertion: FreshnessAssertionInfo + + """ + Information about an Volume Assertion + """ + volumeAssertion: VolumeAssertionInfo + + """ + Information about a SQL Assertion + """ + sqlAssertion: SqlAssertionInfo + + """ + Information about a Field Assertion + """ + fieldAssertion: FieldAssertionInfo + + """ + Schema assertion, e.g. defining the expected structure for an asset. + """ + schemaAssertion: SchemaAssertionInfo + + """ + Information about Custom assertion + """ + customAssertion: CustomAssertionInfo + + """ + The source or origin of the Assertion definition. + """ + source: AssertionSource + + """ + The time that the status last changed and the actor who changed it + """ + lastUpdated: AuditStamp +} + +extend type Assertion { + """ + The actions associated with the Assertion + """ + actions: AssertionActions +} + +""" +Some actions associated with an assertion +""" +type AssertionActions { + """ + Actions to be executed on successful assertion run. + """ + onSuccess: [AssertionAction!]! + + """ + Actions to be executed on failed assertion run. + """ + onFailure: [AssertionAction!]! +} + +""" +An action associated with an assertion +""" +type AssertionAction { + """ + The type of the actions + """ + type: AssertionActionType! +} + + +""" +The type of the Action +""" +enum AssertionActionType { + """ + Raise an incident. + """ + RAISE_INCIDENT + """ + Resolve open incidents related to the assertion. + """ + RESOLVE_INCIDENT +} + + +""" +Information about an Freshness assertion. 
+""" +type FreshnessAssertionInfo { + """ + The urn of the entity that the Freshness assertion is related to + """ + entityUrn: String! + + """ + The type of the Freshness Assertion + """ + type: FreshnessAssertionType! + + """ + Produce FAIL Assertion Result if the asset is not updated on the cadence and within the time range described by the schedule. + """ + schedule: FreshnessAssertionSchedule! + + """ + A filter applied when querying an external Dataset or Table + """ + filter: DatasetFilter +} + +""" +Attributes defining a single Freshness schedule. +""" +type FreshnessAssertionSchedule { + """ + The type of schedule + """ + type: FreshnessAssertionScheduleType! + + """ + A cron schedule. This is populated if the type is CRON. + """ + cron: FreshnessCronSchedule + + """ + A fixed interval schedule. This is populated if the type is FIXED_INTERVAL. + """ + fixedInterval: FixedIntervalSchedule +} + +""" +The type of an Freshness assertion +""" +enum FreshnessAssertionScheduleType { + """ + An schedule based on a CRON schedule representing the expected event times. + """ + CRON + + """ + A scheduled based on a recurring fixed schedule which is used to compute the expected operation window. E.g. "every 24 hours". + """ + FIXED_INTERVAL + + """ + A schedule computed based on when the assertion was last evaluated, to the current moment in time. + """ + SINCE_THE_LAST_CHECK +} + +""" +A cron-formatted schedule +""" +type FreshnessCronSchedule { + """ + A cron-formatted execution interval, as a cron string, e.g. 1 * * * * + """ + cron: String! + + """ + Timezone in which the cron interval applies, e.g. America/Los Angeles + """ + timezone: String! + + """ + An optional offset in milliseconds to SUBTRACT from the timestamp generated by the cron schedule + to generate the lower bounds of the "Freshness window", or the window of time in which an event must have occurred in order for the Freshness + to be considering passing. 
+ If left empty, the start of the Freshness window will be the _end_ of the previously evaluated Freshness window. + """ + windowStartOffsetMs: Long +} + +""" +A fixed interval schedule. +""" +type FixedIntervalSchedule { + """ + Interval unit such as minute/hour/day etc. + """ + unit: DateInterval! + + """ + How many units. Defaults to 1. + """ + multiple: Int! +} + +""" +The source of an Assertion +""" +type AssertionSource { + """ + The source type + """ + type: AssertionSourceType! + """ + The time at which the assertion was initially created and the actor who created it + """ + created: AuditStamp +} + +""" +Information about the field to use in an assertion +""" +type SchemaFieldSpec { + """ + The field path + """ + path: String! + + """ + The DataHub standard schema field type. + """ + type: String! + + """ + The native field type + """ + nativeType: String! +} + +""" +An enum to represent a type of change in an assertion value, metric, or measurement. +""" +enum AssertionValueChangeType { + """ + A change that is defined in absolute terms. + """ + ABSOLUTE + + """ + A change that is defined in relative terms using percentage change + from the original value. + """ + PERCENTAGE +} + +""" +A type of volume (row count) assertion +""" +enum VolumeAssertionType { + """ + A volume assertion that is evaluated against the total row count of a dataset. + """ + ROW_COUNT_TOTAL + + """ + A volume assertion that is evaluated against an incremental row count of a dataset, + or a row count change. + """ + ROW_COUNT_CHANGE + + """ + A volume assertion that checks the latest "segment" in a table based on an incrementing + column to check whether it's row count falls into a particular range. + This can be used to monitor the row count of an incrementing date-partition column segment. + """ + INCREMENTING_SEGMENT_ROW_COUNT_TOTAL + + """ + A volume assertion that compares the row counts in neighboring "segments" or "partitions" + of an incrementing column. 
This can be used to track changes between subsequent date partition + in a table, for example. + """ + INCREMENTING_SEGMENT_ROW_COUNT_CHANGE +} + +""" +Attributes defining an ROW_COUNT_TOTAL volume assertion. +""" +type RowCountTotal { + """ + The operator you'd like to apply. + Note that only numeric operators are valid inputs: + GREATER_THAN, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, LESS_THAN, LESS_THAN_OR_EQUAL_TO, + BETWEEN. + """ + operator: AssertionStdOperator! + + """ + The parameters you'd like to provide as input to the operator. + Note that only numeric parameter types are valid inputs: NUMBER. + """ + parameters: AssertionStdParameters! +} + +""" +Attributes defining an ROW_COUNT_CHANGE volume assertion. +""" +type RowCountChange { + """ + The type of the value used to evaluate the assertion: a fixed absolute value or a relative percentage. + """ + type: AssertionValueChangeType! + + """ + The operator you'd like to apply. + Note that only numeric operators are valid inputs: + GREATER_THAN, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, LESS_THAN, LESS_THAN_OR_EQUAL_TO, + BETWEEN. + """ + operator: AssertionStdOperator! + + """ + The parameters you'd like to provide as input to the operator. + Note that only numeric parameter types are valid inputs: NUMBER. + """ + parameters: AssertionStdParameters! +} + +""" +Attributes defining an INCREMENTING_SEGMENT_ROW_COUNT_TOTAL volume assertion. +""" +type IncrementingSegmentRowCountTotal { + """ + A specification of how the 'segment' can be derived using a column and an optional transformer function. + """ + segment: IncrementingSegmentSpec! + + """ + The operator you'd like to apply. + Note that only numeric operators are valid inputs: + GREATER_THAN, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, LESS_THAN, LESS_THAN_OR_EQUAL_TO, + BETWEEN. + """ + operator: AssertionStdOperator! + + """ + The parameters you'd like to provide as input to the operator. + Note that only numeric parameter types are valid inputs: NUMBER. 
+ """ + parameters: AssertionStdParameters! +} + +""" +Attributes defining an INCREMENTING_SEGMENT_ROW_COUNT_CHANGE volume assertion. +""" +type IncrementingSegmentRowCountChange { + """ + A specification of how the 'segment' can be derived using a column and an optional transformer function. + """ + segment: IncrementingSegmentSpec! + + """ + The type of the value used to evaluate the assertion: a fixed absolute value or a relative percentage. + """ + type: AssertionValueChangeType! + + """ + The operator you'd like to apply to the row count value + Note that only numeric operators are valid inputs: + GREATER_THAN, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, LESS_THAN, LESS_THAN_OR_EQUAL_TO, + BETWEEN. + """ + operator: AssertionStdOperator! + + """ + The parameters you'd like to provide as input to the operator. + Note that only numeric parameter types are valid inputs: NUMBER. + """ + parameters: AssertionStdParameters! +} + +""" +Core attributes required to identify an incrementing segment in a table. This type is mainly useful +for tables that constantly increase with new rows being added on a particular cadence (e.g. fact or event tables). + +An incrementing segment represents a logical chunk of data which is INSERTED +into a dataset on a regular interval, along with the presence of a constantly-incrementing column +value such as an event time, date partition, or last modified column. + +An incrementing segment is principally identified by 2 key attributes combined: + +1. A field or column that represents the incrementing value. New rows that are inserted will be identified using this column. + Note that the value of this column may not by itself represent the "bucket" or the "segment" in which the row falls. + +2. [Optional] An transformer function that may be applied to the selected column value in order + to obtain the final "segment identifier" or "bucket identifier". 
Rows that have the same value after applying the transformation + will be grouped into the same segment, using which the final value (e.g. row count) will be determined. +""" +type IncrementingSegmentSpec { + """ + The field to use to generate segments. It must be constantly incrementing as new rows are inserted. + """ + field: SchemaFieldSpec! + + """ + Optional transformer function to apply to the field in order to obtain the final segment or bucket identifier. + If not provided, then no operator will be applied to the field. (identity function) + """ + transformer: IncrementingSegmentFieldTransformer +} + +""" +The definition of the transformer function that should be applied to a given field / column value in a dataset +in order to determine the segment or bucket that it belongs to, which in turn is used to evaluate +volume assertions. +""" +type IncrementingSegmentFieldTransformer { + """ + The 'standard' operator type. Note that not all source systems will support all operators. + """ + type: IncrementingSegmentFieldTransformerType! + + """ + The 'native' transformer type, useful as a back door if a custom transformer is required. + This field is required if the type is NATIVE. + """ + nativeType: String +} + +""" +The 'standard' transformer type. Note that not all source systems will support all operators. +""" +enum IncrementingSegmentFieldTransformerType { + """ + Rounds a timestamp (in seconds) down to the start of the month. + """ + TIMESTAMP_MS_TO_MINUTE + + """ + Rounds a timestamp (in milliseconds) down to the nearest hour. + """ + TIMESTAMP_MS_TO_HOUR + + """ + Rounds a timestamp (in milliseconds) down to the start of the day. + """ + TIMESTAMP_MS_TO_DATE + + """ + Rounds a timestamp (in milliseconds) down to the start of the month + """ + TIMESTAMP_MS_TO_MONTH + + """ + Rounds a timestamp (in milliseconds) down to the start of the year + """ + TIMESTAMP_MS_TO_YEAR + + """ + Rounds a numeric value down to the nearest integer. 
+ """ + FLOOR + + """ + Rounds a numeric value up to the nearest integer. + """ + CEILING + + """ + A backdoor to provide a native operator type specific to a given source system like + Snowflake, Redshift, BQ, etc. + """ + NATIVE +} + +""" +A definition of a Volume (row count) assertion. +""" +type VolumeAssertionInfo { + """ + The entity targeted by this Volume check. + """ + entityUrn: String! + + """ + The type of the freshness assertion being monitored. + """ + type: VolumeAssertionType! + + """ + Produce FAILURE Assertion Result if the row count of the asset does not meet specific requirements. + Required if type is 'ROW_COUNT_TOTAL'. + """ + rowCountTotal: RowCountTotal + + """ + Produce FAILURE Assertion Result if the row count delta of the asset does not meet specific requirements. + Required if type is 'ROW_COUNT_CHANGE'. + """ + rowCountChange: RowCountChange + + """ + Produce FAILURE Assertion Result if the latest incrementing segment row count total of the asset + does not meet specific requirements. Required if type is 'INCREMENTING_SEGMENT_ROW_COUNT_TOTAL'. + """ + incrementingSegmentRowCountTotal: IncrementingSegmentRowCountTotal + + """ + Produce FAILURE Assertion Result if the incrementing segment row count delta of the asset + does not meet specific requirements. Required if type is 'INCREMENTING_SEGMENT_ROW_COUNT_CHANGE'. + """ + incrementingSegmentRowCountChange: IncrementingSegmentRowCountChange + + """ + A definition of the specific filters that should be applied, when performing monitoring. + If not provided, there is no filter, and the full table is under consideration. + """ + filter: DatasetFilter +} + +""" +The type of the SQL assertion being monitored. +""" +enum SqlAssertionType { + """ + A SQL Metric Assertion, e.g. one based on a numeric value returned by an arbitrary SQL query. + """ + METRIC + + """ + A SQL assertion that is evaluated against the CHANGE in a metric assertion over time. 
+ """ + METRIC_CHANGE +} + +""" +Attributes defining a SQL Assertion +""" +type SqlAssertionInfo { + """ + The type of the SQL assertion being monitored. + """ + type: SqlAssertionType! + + """ + The entity targeted by this SQL check. + """ + entityUrn: String! + + """ + The SQL statement to be executed when evaluating the assertion. + """ + statement: String! + + """ + The type of the value used to evaluate the assertion: a fixed absolute value or a relative percentage. + Required if the type is METRIC_CHANGE. + """ + changeType: AssertionValueChangeType + + """ + The operator you'd like to apply to the result of the SQL query. + """ + operator: AssertionStdOperator! + + """ + The parameters you'd like to provide as input to the operator. + """ + parameters: AssertionStdParameters! +} + +""" +The type of a Field assertion +""" +enum FieldAssertionType { + """ + An assertion used to validate the values contained with a field / column given a set of rows. + """ + FIELD_VALUES + + """ + An assertion used to validate the value of a common field / column metric (e.g. aggregation) + such as null count + percentage, min, max, median, and more. + """ + FIELD_METRIC +} + +""" +The type of the Field Transform +""" +enum FieldTransformType { + """ + Obtain the length of a string field / column (applicable to string types) + """ + LENGTH +} + +""" +The type of failure threshold. +""" +enum FieldValuesFailThresholdType { + """ + The maximum number of column values (i.e. rows) that are allowed + to fail the defined expectations before the assertion officially fails. + """ + COUNT + + """ + The maximum percentage of rows that are allowed + to fail the defined column expectations before the assertion officially fails. + """ + PERCENTAGE +} + +""" +A standard metric that can be derived from the set of values +for a specific field / column of a dataset / table. 
+""" +enum FieldMetricType { + """ + The number of unique values found in the column value set + """ + UNIQUE_COUNT + + """ + The percentage of unique values to total rows for the dataset + """ + UNIQUE_PERCENTAGE + + """ + The number of null values found in the column value set + """ + NULL_COUNT + + """ + The percentage of null values to total rows for the dataset + """ + NULL_PERCENTAGE + + """ + The minimum value in the column set (applies to numeric columns) + """ + MIN + + """ + The maximum value in the column set (applies to numeric columns) + """ + MAX + + """ + The mean length found in the column set (applies to numeric columns) + """ + MEAN + + """ + The median length found in the column set (applies to numeric columns) + """ + MEDIAN + + """ + The stddev length found in the column set (applies to numeric columns) + """ + STDDEV + + """ + The number of negative values found in the value set (applies to numeric columns) + """ + NEGATIVE_COUNT + + """ + The percentage of negative values to total rows for the dataset (applies to numeric columns) + """ + NEGATIVE_PERCENTAGE + + """ + The number of zero values found in the value set (applies to numeric columns) + """ + ZERO_COUNT + + """ + The percentage of zero values to total rows for the dataset (applies to numeric columns) + """ + ZERO_PERCENTAGE + + """ + The minimum length found in the column set (applies to string columns) + """ + MIN_LENGTH + + """ + The maximum length found in the column set (applies to string columns) + """ + MAX_LENGTH + + """ + The number of empty string values found in the value set (applies to string columns). + Note: This is a completely different metric different from NULL_COUNT! + """ + EMPTY_COUNT + + """ + The percentage of empty string values to total rows for the dataset (applies to string columns). + Note: This is a completely different metric different from NULL_PERCENTAGE! + """ + EMPTY_PERCENTAGE +} + +""" +A definition of a Field (Column) assertion. 
+""" +type FieldAssertionInfo { + """ + The type of the field assertion being monitored. + """ + type: FieldAssertionType! + + """ + The entity targeted by this Field check. + """ + entityUrn: String! + + """ + The definition of an assertion that validates individual values of a field / column for a set of rows. + """ + fieldValuesAssertion: FieldValuesAssertion + + """ + The definition of an assertion that validates a common metric obtained about a field / column for a set of rows. + """ + fieldMetricAssertion: FieldMetricAssertion + + """ + A definition of the specific filters that should be applied, when performing monitoring. + If not provided, there is no filter, and the full table is under consideration. + """ + filter: DatasetFilter +} + +""" +A definition of a Field Values assertion. +""" +type FieldValuesAssertion { + """ + The field under evaluation. + """ + field: SchemaFieldSpec! + + """ + An optional transform to apply to field values before evaluating the operator. + """ + transform: FieldTransform + + """ + The predicate to evaluate against a single value of the field. + Depending on the operator, parameters may be required + """ + operator: AssertionStdOperator! + + """ + Standard parameters required for the assertion. + """ + parameters: AssertionStdParameters + + """ + Additional customization about when the assertion should be officially considered failing. + """ + failThreshold: FieldValuesFailThreshold! + + """ + Whether to ignore or allow nulls when running the values assertion. + """ + excludeNulls: Boolean! +} + +""" +Definition of a transform applied to the values of a column / field. +""" +type FieldTransform { + """ + The type of the field transform. + """ + type: FieldTransformType! +} + +type FieldValuesFailThreshold { + """ + The type of failure threshold. + """ + type: FieldValuesFailThresholdType! + + """ + The value of the threshold, either representing a count or percentage. + """ + value: Long! 
+} + +""" +A definition of a Field Metric assertion. +""" +type FieldMetricAssertion { + """ + The field under evaluation + """ + field: SchemaFieldSpec! + + """ + The specific metric to assert against. + """ + metric: FieldMetricType! + + """ + The predicate to evaluate against the metric for the field / column. + """ + operator: AssertionStdOperator! + + """ + Standard parameters required for the assertion. + """ + parameters: AssertionStdParameters +} + +""" +Information about an Schema assertion +""" +type SchemaAssertionInfo { + """ + The entity targeted by this schema assertion. + """ + entityUrn: String! + + """ + A single field in the schema assertion. + """ + fields: [SchemaAssertionField!]! + + """ + A definition of the expected structure for the asset + Deprecated! Use the simpler 'fields' instead. + """ + schema: SchemaMetadata + + """ + The compatibility level required for the assertion to pass. + """ + compatibility: SchemaAssertionCompatibility! +} + +""" +Information about a custom assertion +""" +type CustomAssertionInfo { + """ + The type of custom assertion. + """ + type: String! + + """ + The entity targeted by this custom assertion. + """ + entityUrn: String! + + """ + The field serving as input to the assertion, if any. + """ + field: SchemaFieldRef + + """ + Logic comprising a raw, unstructured assertion. + """ + logic: String +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/auth.graphql b/datahub-graphql-core/src/main/resources/auth.graphql index b76aa132c219c9..5ce26067fe58e3 100644 --- a/datahub-graphql-core/src/main/resources/auth.graphql +++ b/datahub-graphql-core/src/main/resources/auth.graphql @@ -11,6 +11,19 @@ extend type Query { List access tokens stored in DataHub. """ listAccessTokens(input: ListAccessTokenInput!): ListAccessTokenResult! + + """ + Fetches the metadata of an access token. + This is useful to debug when you have a raw token but don't know the actor. 
+  """
+  getAccessTokenMetadata(token: String!): AccessTokenMetadata!
+
+  """
+  Experimental API to debug Access for users.
+  Backward incompatible changes will be made without notice in the future.
+  Do not build on top of this API.
+  """
+  debugAccess(userUrn: String!): DebugAccessResult!
 }
 
 extend type Mutation {
@@ -268,4 +281,52 @@ type EntityPrivileges {
   Whether or not a user can update the Queries for the entity (e.g. dataset)
   """
   canEditQueries: Boolean
+
+  """
+  Whether or not a user can update the properties for the entity (e.g. dataset)
+  """
+  canEditProperties: Boolean
+}
+
+"""
+Experimental API result to debug Access for users.
+Backward incompatible changes will be made without notice in the future.
+"""
+type DebugAccessResult {
+  """
+  Roles that the user has.
+  """
+  roles: [String!]!
+
+  """
+  Groups that the user belongs to.
+  """
+  groups: [String!]!
+
+  """
+  List of groups that the user is assigned to AND where the group has a role.
+  This is a subset of the groups property.
+  """
+  groupsWithRoles: [String!]!
+
+  """
+  Final set of roles that are coming through groups.
+  If no role is assigned to groups, then this would be empty.
+  """
+  rolesViaGroups: [String!]!
+
+  """
+  Union of `roles` + `rolesViaGroups` that the user has.
+  """
+  allRoles: [String!]!
+
+  """
+  List of Policies that apply to this user directly or indirectly.
+  """
+  policies: [String!]!
+
+  """
+  List of privileges that this user has directly or indirectly.
+  """
+  privileges: [String!]!
 }
diff --git a/datahub-graphql-core/src/main/resources/common.graphql b/datahub-graphql-core/src/main/resources/common.graphql
new file mode 100644
index 00000000000000..bac56c97f61cf7
--- /dev/null
+++ b/datahub-graphql-core/src/main/resources/common.graphql
@@ -0,0 +1,49 @@
+"""
+Object containing the documentation aspect for an entity
+"""
+type Documentation {
+  """
+  Documentation associated with this entity
+  """
+  documentations: [DocumentationAssociation!]!
+} + +""" +Object containing the documentation aspect for an entity +""" +type DocumentationAssociation { + """ + Structured properties on this entity + """ + documentation: String! + + """ + Information about who, why, and how this metadata was applied + """ + attribution: MetadataAttribution +} + +""" +Information about who, why, and how this metadata was applied +""" +type MetadataAttribution { + """ + The time this metadata was applied + """ + time: Long! + + """ + The actor responsible for this metadata application + """ + actor: Entity! + + """ + The source of this metadata application. If propagated, this will be an action. + """ + source: Entity + + """ + Extra details about how this metadata was applied + """ + sourceDetail: [StringMapEntry!] +} diff --git a/datahub-graphql-core/src/main/resources/connection.graphql b/datahub-graphql-core/src/main/resources/connection.graphql new file mode 100644 index 00000000000000..1a7249485e69de --- /dev/null +++ b/datahub-graphql-core/src/main/resources/connection.graphql @@ -0,0 +1,130 @@ +# DataHub Connections-specific GraphQL types + +extend type Query { + """ + Get a set of connection details by URN. + This requires the 'Manage Connections' platform privilege. + Returns null if a connection with the provided urn does not exist. + """ + connection(urn: String!): DataHubConnection +} + +extend type Mutation { + """ + Upsert a particular connection. + This requires the 'Manage Connections' platform privilege. + """ + upsertConnection(input: UpsertDataHubConnectionInput!): DataHubConnection! +} + +""" +A connection between DataHub and an external Platform. +""" +type DataHubConnection implements Entity { + """ + The urn of the connection + """ + urn: String! + + """ + The standard Entity Type field + """ + type: EntityType! + + """ + The connection details + """ + details: DataHubConnectionDetails! + + """ + The external Data Platform associated with the connection + """ + platform: DataPlatform! 
+ + """ + Not implemented! + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + + +""" +The details of the Connection +""" +type DataHubConnectionDetails { + """ + The type or format of connection + """ + type: DataHubConnectionDetailsType! + + """ + A JSON-encoded connection. Present when type is JSON. + """ + json: DataHubJsonConnection + + """ + The name for this DataHub connection + """ + name: String +} + +""" +The type of a DataHub connection +""" +enum DataHubConnectionDetailsType { + """ + A json-encoded set of connection details. + """ + JSON +} + +""" +The details of a JSON Connection +""" +type DataHubJsonConnection { + """ + The JSON blob containing the specific connection details. + """ + blob: String! +} + +""" +Input required to upsert a new DataHub connection. +""" +input UpsertDataHubConnectionInput { + """ + An optional ID to use when creating the URN of the connection. If none is provided, + a random UUID will be generated automatically. + """ + id: String + + """ + The type or format of connection + """ + type: DataHubConnectionDetailsType! + + """ + Urn of the associated platform + """ + platformUrn: String! + + """ + A JSON-encoded connection. This must be present when type is JSON. + """ + json: DataHubJsonConnectionInput + + """ + An optional name for this connection entity + """ + name: String +} + +""" +The details of a JSON Connection +""" +input DataHubJsonConnectionInput { + """ + The JSON blob containing the specific connection details. + """ + blob: String! +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/contract.graphql b/datahub-graphql-core/src/main/resources/contract.graphql new file mode 100644 index 00000000000000..27d6510c89c24a --- /dev/null +++ b/datahub-graphql-core/src/main/resources/contract.graphql @@ -0,0 +1,183 @@ +extend type Mutation { + """ + Create or update a data contract for a given dataset. 
Requires the "Edit Data Contract" privilege for the provided dataset.
+  """
+  upsertDataContract(urn: String, input: UpsertDataContractInput!): DataContract!
+}
+
+extend type Dataset {
+  """
+  An optional Data Contract defined for the Dataset.
+  """
+  contract: DataContract
+}
+
+"""
+A Data Contract Entity. A Data Contract is a verifiable group of assertions regarding various aspects of the data: its freshness (SLA),
+schema, and data quality or validity. This group of assertions represents a data owner's commitment to producing data that conforms to the agreed
+upon contract. Each dataset can have a single contract. The contract can be in a "passing" or "violating" state, depending
+on whether the assertions that compose the contract are passing or failing.
+Note that the data contract entity is currently in early preview (beta).
+"""
+type DataContract implements Entity {
+  """
+  A primary key of the data contract
+  """
+  urn: String!
+
+  """
+  The standard entity type
+  """
+  type: EntityType!
+
+  """
+  Properties describing the data contract
+  """
+  properties: DataContractProperties
+
+  """
+  The status of the data contract
+  """
+  status: DataContractStatus
+
+  """
+  List of relationships between the source Entity and some destination entities with a given types
+  """
+  relationships(input: RelationshipsInput!): EntityRelationshipsResult
+}
+
+type DataContractProperties {
+  """
+  The urn of the related entity, e.g. the Dataset today. In the future, we may support additional contract entities.
+  """
+  entityUrn: String!
+
+  """
+  The Freshness (SLA) portion of the contract.
+  As of today, it is expected that there will not be more than 1 Freshness contract. If there are, only the first will be displayed.
+  """
+  freshness: [FreshnessContract!]
+
+  """
+  The schema / structural portion of the contract.
+  As of today, it is expected that there will not be more than 1 Schema contract. If there are, only the first will be displayed.
+ """ + schema: [SchemaContract!] + + """ + A set of data quality related contracts, e.g. table and column-level contract constraints. + """ + dataQuality: [DataQualityContract!] +} + +""" +The state of the data contract +""" +enum DataContractState { + """ + The data contract is active. + """ + ACTIVE + + """ + The data contract is pending. Note that this symbol is currently experimental. + """ + PENDING +} + +type DataContractStatus { + """ + The state of the data contract + """ + state: DataContractState! +} + +type DataQualityContract { + """ + The assertion representing the schema contract. + """ + assertion: Assertion! +} + +type SchemaContract { + """ + The assertion representing the schema contract. + """ + assertion: Assertion! +} + +type FreshnessContract { + """ + The assertion representing the Freshness contract. + """ + assertion: Assertion! +} + +""" +Input required to upsert a Data Contract entity for an asset +""" +input UpsertDataContractInput { + """ + The urn of the related entity. Dataset is the only entity type supported today. + """ + entityUrn: String! + + """ + The Freshness / Freshness portion of the contract. If not provided, this will be set to none. + For Dataset Contracts, it is expected that there will not be more than 1 Freshness contract. If there are, only the first will be displayed. + """ + freshness: [FreshnessContractInput!] + + """ + The schema / structural portion of the contract. If not provided, this will be set to none. + For Dataset Contracts, it is expected that there will not be more than 1 Schema contract. If there are, only the first will be displayed. + """ + schema: [SchemaContractInput!] + + """ + The data quality portion of the contract. If not provided, this will be set to none. + """ + dataQuality: [DataQualityContractInput!] + + """ + The state of the data contract. If not provided, it will be in ACTIVE mode by default. + """ + state: DataContractState + + """ + Optional ID of the contract you want to create. 
Only applicable if this is a create operation. If not provided, a random + id will be generated for you. + """ + id: String +} + +""" +Input required to create an Freshness contract +""" +input FreshnessContractInput { + """ + The assertion monitoring this part of the data contract. Assertion must be of type Freshness. + """ + assertionUrn: String! +} + +""" +Input required to create a schema contract +""" +input SchemaContractInput { + """ + The assertion monitoring this part of the data contract. Assertion must be of type Data Schema. + """ + assertionUrn: String! +} + +""" +Input required to create a data quality contract +""" +input DataQualityContractInput { + """ + The assertion monitoring this part of the data contract. Assertion must be of type Dataset, Volume, Field / Column, or Custom SQL. + """ + assertionUrn: String! +} + diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index b37a8f34fa0563..609597beee51bd 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -68,11 +68,27 @@ type Query { Fetch a Tag by primary key (urn) """ tag(urn: String!): Tag + + """ + Fetch a View by primary key (urn) + """ + view(urn: String!): DataHubView + + """ + Fetch a Form by primary key (urn) + """ + form(urn: String!): Form + """ Fetch a Role by primary key (urn) """ role(urn: String!): Role + """ + Fetch a ERModelRelationship by primary key (urn) + """ + erModelRelationship(urn: String!): ERModelRelationship + """ Fetch a Glossary Term by primary key (urn) """ @@ -231,6 +247,150 @@ type Query { Fetch a Data Platform Instance by primary key (urn) """ dataPlatformInstance(urn: String!): DataPlatformInstance + + """ + Fetch a Business Attribute by primary key (urn) + """ + businessAttribute(urn: String!): BusinessAttribute + + """ + Fetch all Business Attributes + """ + listBusinessAttributes(input: 
ListBusinessAttributesInput!): ListBusinessAttributesResult +} + +""" +An ERModelRelationship is a high-level abstraction that dictates what datasets fields are erModelRelationshiped. +""" +type ERModelRelationship implements EntityWithRelationships & Entity { + """ + The primary key of the role + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + Unique id for the erModelRelationship + """ + id: String! + + """ + An additional set of read only properties + """ + properties: ERModelRelationshipProperties + + """ + An additional set of of read write properties + """ + editableProperties: ERModelRelationshipEditableProperties + + """ + References to internal resources related to the dataset + """ + institutionalMemory: InstitutionalMemory + + """ + Ownership metadata of the dataset + """ + ownership: Ownership + + """ + Status of the Dataset + """ + status: Status + + """ + Tags used for searching dataset + """ + tags: GlobalTags + + """ + The structured glossary terms associated with the dataset + """ + glossaryTerms: GlossaryTerms + + """ + List of relationships between the source Entity and some destination entities with a given types + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges + + """ + No-op required for the model + """ + lineage(input: LineageInput!): EntityLineageResult +} + +""" +Additional properties about a ERModelRelationship +""" +type ERModelRelationshipEditableProperties { + + """ + Documentation of the ERModelRelationship + """ + description: String + """ + Display name of the ERModelRelationship + """ + name: String +} + +""" +Additional properties about a ERModelRelationship +""" +type ERModelRelationshipProperties { + + """ + The name of the ERModelRelationship used in display + """ + name: String! + """ + The urn of source + """ + source: Dataset! 
+
+  """
+  The urn of destination
+  """
+  destination: Dataset!
+
+  """
+  The relationship field mappings
+  """
+  relationshipFieldMappings: [RelationshipFieldMapping!]
+
+  """
+  Created timestamp millis associated with the ERModelRelationship
+  """
+  createdTime: Long
+
+  """
+  Created actor urn associated with the ERModelRelationship
+  """
+  createdActor: Entity
+}
+
+"""
+ERModelRelationship FieldMap
+"""
+type RelationshipFieldMapping {
+  """
+  left field
+  """
+  sourceField: String!
+  """
+  right field
+  """
+  destinationField: String!
 }
 
 """
@@ -467,6 +627,31 @@ type Mutation {
   """
   unsetDomain(entityUrn: String!): Boolean
 
+  """
+  Create an ERModelRelationship
+  """
+  createERModelRelationship(
+    "Input required to create a new ERModelRelationship"
+    input: ERModelRelationshipUpdateInput!): ERModelRelationship
+
+  """
+  Update an ERModelRelationship
+  """
+  updateERModelRelationship(
+    "The urn of the ERModelRelationship to update"
+    urn: String!,
+    "Input required to update an existing ERModelRelationship"
+    input: ERModelRelationshipUpdateInput!): Boolean
+
+  """
+  Delete an ERModelRelationship
+  """
+  deleteERModelRelationship(
+    "The urn of the ERModelRelationship to delete"
+    urn: String!): Boolean
+
+
+
   """
   Sets the Deprecation status for a Metadata Entity. Requires the Edit Deprecation status privilege for an entity.
   """
@@ -582,6 +767,11 @@ type Mutation {
   """
   createPost(input: CreatePostInput!): Boolean
 
+  """
+  Update or edit a post
+  """
+  updatePost(input: UpdatePostInput!): Boolean
+
   """
   Delete a post
   """
@@ -695,6 +885,64 @@ type Mutation {
   deleteOwnershipType(
     "Urn of the Custom Ownership Type to remove." urn: String!, deleteReferences: Boolean): Boolean
+
+  """
+  Submit a response to a prompt from a form collecting metadata on different entities.
+  Provide the urn of the entity you're submitting a form response as well as the required input.
+  """
+  submitFormPrompt(urn: String!, input: SubmitFormPromptInput!): Boolean
+
+  """
+  Assign a form to different entities. 
This will be a patch by adding this form to the list + of forms on an entity. + """ + batchAssignForm(input: BatchAssignFormInput!): Boolean + + """ + Creates a filter for a form to apply it to certain entities. Entities that match this filter will have + a given form applied to them. + This feature is ONLY supported in DataHub Cloud. + """ + createDynamicFormAssignment(input: CreateDynamicFormAssignmentInput!): Boolean + + """ + Verifies a form on an entity when all of the required questions on the form are complete and the form + is of type VERIFICATION. + """ + verifyForm(input: VerifyFormInput!): Boolean + + """ + Create Business Attribute Api + """ + createBusinessAttribute( + "Inputs required to create a new BusinessAttribute." + input: CreateBusinessAttributeInput!): BusinessAttribute + + """ + Delete a Business Attribute by urn. + """ + deleteBusinessAttribute( + "Urn of the business attribute to remove." + urn: String!): Boolean + + """ + Update Business Attribute + """ + updateBusinessAttribute( + "The urn identifier for the Business Attribute to update." + urn: String!, + "Inputs required to create a new Business Attribute." + input: UpdateBusinessAttributeInput!): BusinessAttribute + + """ + Add Business Attribute + """ + addBusinessAttribute(input: AddBusinessAttributeInput!): Boolean + + """ + Remove Business Attribute + """ + removeBusinessAttribute(input: AddBusinessAttributeInput!): Boolean } """ @@ -756,6 +1004,11 @@ enum EntityType { """ DATA_PLATFORM + """ + The ERModelRelationship Entity + """ + ER_MODEL_RELATIONSHIP + """ The Dashboard Entity """ @@ -901,10 +1154,60 @@ enum EntityType { """ CUSTOM_OWNERSHIP_TYPE + """ + A connection to an external source. 
+  """
+  DATAHUB_CONNECTION
+
+  """
+  A DataHub incident - SaaS only
+  """
+  INCIDENT
+
   """"
   A Role from an organisation
   """
   ROLE
+
+  """
+  A data contract
+  """
+  DATA_CONTRACT
+
+  """
+  A structured property on entities
+  """
+  STRUCTURED_PROPERTY
+
+  """
+  A form entity on entities
+  """
+  FORM
+
+  """
+  A data type registered to DataHub
+  """
+  DATA_TYPE
+
+  """
+  A type of entity registered to DataHub
+  """
+  ENTITY_TYPE
+
+  """
+  A type of entity that is restricted to the user
+  """
+  RESTRICTED
+
+  """
+  Another entity type - refer to a provided entity type urn.
+  """
+  OTHER
+
+  """
+  A Business Attribute
+  """
+  BUSINESS_ATTRIBUTE
 }
 
 """
@@ -912,6 +1215,11 @@
 Input for the get entity counts endpoint
 """
 input EntityCountInput {
   types: [EntityType!]
+
+  """
+  Optional - A View to apply when generating results
+  """
+  viewUrn: String
 }
 
 """
@@ -970,6 +1278,11 @@ input RelationshipsInput {
   The number of results to be returned
   """
   count: Int
+
+  """
+  Whether to include soft-deleted related entities
+  """
+  includeSoftDelete: Boolean = true
 }
 
 """
@@ -1097,6 +1410,10 @@ type LineageRelationship {
   """
   isManual: Boolean
 
+  """
+  The paths traversed for this relationship
+  """
+  paths: [EntityPath]
 }
 
 """
@@ -1279,6 +1596,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity {
   """
   domain: DomainAssociation
 
+  """
+  The forms associated with the Dataset
+  """
+  forms: Forms
+
   """
   The Roles and the properties to access the dataset
   """
@@ -1309,7 +1631,7 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity {
   """
   Assertions associated with the Dataset
   """
-  assertions(start: Int, count: Int): EntityAssertionsResult
+  assertions(start: Int, count: Int, includeSoftDeleted: Boolean): EntityAssertionsResult
 
   """
   Edges extending from this entity
@@ -1421,6 +1743,11 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity {
   Whether or not this entity exists on DataHub
   """
   exists: Boolean
+
+  """
+  
Structured properties about this Dataset + """ + structuredProperties: StructuredProperties } type RoleAssociation { @@ -1472,6 +1799,7 @@ type Role implements Entity { """ actors: Actor + isAssignedToMe: Boolean! } @@ -1491,7 +1819,7 @@ type RoleUser { type RoleProperties { """ - Name of the Role in an organisation + Name of the Role in an organisation """ name: String! @@ -1514,6 +1842,8 @@ type RoleProperties { type FineGrainedLineage { upstreams: [SchemaFieldRef!] downstreams: [SchemaFieldRef!] + query: String + transformOperation: String } """ @@ -1524,6 +1854,7 @@ type SiblingProperties { If this entity is the primary sibling among the sibling set """ isPrimary: Boolean + """ The sibling entities """ @@ -1631,7 +1962,7 @@ type VersionedDataset implements Entity { domain: DomainAssociation """ - Experimental! The resolved health status of the Dataset + Experimental! The resolved health status of the asset """ health: [Health!] @@ -1697,6 +2028,12 @@ input AspectParams { Only fetch auto render aspects """ autoRenderOnly: Boolean + + """ + Fetch using aspect names + If absent, returns all aspects matching other inputs + """ + aspectNames: [String!] } @@ -1789,12 +2126,13 @@ type DatasetProperties { """ Last Modified timestamp millis associated with the Dataset """ - lastModified: Long + lastModified: AuditStamp! """ - Actor associated with the Dataset's lastModified timestamp + Actor associated with the Dataset's lastModified timestamp. + Deprecated - Use lastModified.actor instead. """ - lastModifiedActor: String + lastModifiedActor: String @deprecated } @@ -1904,6 +2242,22 @@ type GlossaryTerm implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -2041,6 +2395,22 @@ type GlossaryNode implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -2070,6 +2440,11 @@ type GlossaryNodeProperties { Description of the glossary term """ description: String + + """ + Custom properties of the Glossary Node + """ + customProperties: [CustomPropertiesEntry!] } """ @@ -2336,6 +2711,11 @@ enum FabricType { Designates corporation fabrics """ CORP + + """ + Designates review fabrics + """ + RVW } """ @@ -2441,12 +2821,33 @@ type Container implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean -} -""" -Read-only properties that originate in the source data platform -""" -type ContainerProperties { + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges +} + +""" +Read-only properties that originate in the source data platform +""" +type ContainerProperties { """ Display name of the Container """ @@ -2816,10 +3217,40 @@ type SchemaFieldEntity implements Entity { """ parent: Entity! 
+ """ + Structured properties on this schema field + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + """ Granular API for querying edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Business Attribute associated with the field + """ + businessAttributes: BusinessAttributes + + """ + Documentation aspect for this schema field + """ + documentation: Documentation +} + +""" +Object containing structured properties for an entity +""" +type StructuredProperties { + """ + Structured properties on this entity + """ + properties: [StructuredPropertiesEntry!] } """ @@ -2891,6 +3322,16 @@ type SchemaField { Whether the field is part of a partitioning key schema """ isPartitioningKey: Boolean + + """ + For schema fields that have other properties that are not modeled explicitly, represented as a JSON string. + """ + jsonProps: String + + """ + Schema field entity that exist in the database for this schema field + """ + schemaFieldEntity: SchemaFieldEntity } """ @@ -2932,6 +3373,7 @@ type EditableSchemaFieldInfo { Glossary terms associated with the field """ glossaryTerms: GlossaryTerms + } """ @@ -3018,6 +3460,12 @@ type ViewProperties { """ logic: String! + """ + A formatted version of the logic associated with the view. + For dbt, this contains the compiled SQL. + """ + formattedLogic: String + """ The language in which the view logic is written, for example SQL """ @@ -3034,6 +3482,11 @@ type DatasetEditableProperties { Description of the Dataset """ description: String + + """ + Editable name of the Dataset + """ + name: String } """ @@ -3429,10 +3882,36 @@ type CorpUser implements Entity { """ globalTags: GlobalTags @deprecated + """ + Whether or not this entity exists on DataHub + """ + exists: Boolean + """ Settings that a user can customize through the datahub ui """ settings: CorpUserSettings + + """ + Experimental API. 
+ For fetching extra aspects that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -3686,6 +4165,16 @@ type CorpUserEditableProperties { Email address for the user """ email: String + + """ + User persona, if present + """ + persona: DataHubPersona + + """ + Platforms commonly used by the user, if present. + """ + platforms: [DataPlatform!] } """ @@ -3736,6 +4225,16 @@ input CorpUserUpdateInput { Email address for the user """ email: String + + """ + The platforms that the user frequently works with + """ + platformUrns: [String!] + + """ + The user's persona urn" + """ + personaUrn: String } """ @@ -3793,6 +4292,27 @@ type CorpGroup implements Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -3877,6 +4397,11 @@ type CorpGroupEditableProperties { Email address for the group """ email: String + + """ + A URL which points to a picture which user wants to set as a profile photo + """ + pictureLink: String } """ @@ -3897,6 +4422,11 @@ input CorpGroupUpdateInput { Email address for the group """ email: String + + """ + A URL which points to a picture which user wants to set as a profile photo + """ + pictureLink: String } """ @@ -3994,6 +4524,12 @@ type Tag implements Entity { Deprecated, use properties.description field instead """ description: String @deprecated + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4088,6 +4624,11 @@ type GlossaryTermAssociation { """ term: GlossaryTerm! + """ + The actor who is responsible for the term being added" + """ + actor: CorpUser + """ Reference back to the associated urn for tracking purposes e.g. when sibling nodes are merged together """ @@ -4314,6 +4855,25 @@ input DatasetEditablePropertiesUpdate { Writable description aka documentation for a Dataset """ description: String! + """ + Editable name of the Dataset + """ + name: String +} + +""" +Update to writable Dataset fields +""" +input ERModelRelationshipEditablePropertiesUpdate { + """ + Display name of the ERModelRelationship + """ + name: String + + """ + Writable description for ERModelRelationship + """ + description: String! 
} """ @@ -4434,6 +4994,68 @@ input CreateTagInput { description: String } +""" +Input required to create/update a new ERModelRelationship +""" +input ERModelRelationshipUpdateInput { + """ + Details about the ERModelRelationship + """ + properties: ERModelRelationshipPropertiesInput + """ + Update to editable properties + """ + editableProperties: ERModelRelationshipEditablePropertiesUpdate +} + +""" +Details about the ERModelRelationship +""" +input ERModelRelationshipPropertiesInput { + """ + Details about the ERModelRelationship + """ + name: String! + """ + Details about the ERModelRelationship + """ + source: String! + """ + Details about the ERModelRelationship + """ + destination: String! + """ + Details about the ERModelRelationship + """ + relationshipFieldmappings: [RelationshipFieldMappingInput!] + """ + optional flag about the ERModelRelationship is getting create + """ + created: Boolean + """ + optional field to prevent created time while the ERModelRelationship is getting update + """ + createdAt: Long + """ + optional field to prevent create actor while the ERModelRelationship is getting update + """ + createdBy: String +} + +""" +Details about the ERModelRelationship +""" +input RelationshipFieldMappingInput { + """ + Details about the ERModelRelationship + """ + sourceField: String + """ + Details about the ERModelRelationship + """ + destinationField: String +} + """ An update for the ownership information for a Metadata Entity """ @@ -4624,6 +5246,12 @@ type Notebook implements Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -4944,6 +5572,27 @@ type Dashboard implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. 
+ For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -5249,6 +5898,32 @@ type Chart implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Sub Types that this entity implements + """ + subTypes: SubTypes + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -5606,6 +6281,32 @@ type DataFlow implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -5684,6 +6385,11 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { """ type: EntityType! 
+ """ + Sub Types that this entity implements + """ + subTypes: SubTypes + """ The timestamp for the last time this entity was ingested """ @@ -5801,6 +6507,27 @@ type DataJob implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + Experimental! The resolved health statuses of the asset + """ + health: [Health!] + + """ + The forms associated with the Dataset + """ + forms: Forms } """ @@ -5873,7 +6600,12 @@ type DataProcessRunEvent implements TimeSeriesAspect { The timestamp associated with the run event in milliseconds """ timestampMillis: Long! -} + + """ + The duration of the run in milliseconds + """ + durationMillis: Long +} """ The status of the data process instance @@ -6537,10 +7269,10 @@ type PartitionSpec { """ The partition identifier """ - partition: String! + partition: String """ - The optional time window partition information + The optional time window partition information - required if type is TIMESTAMP_FIELD. """ timePartition: TimeWindow } @@ -6566,7 +7298,6 @@ type TimeWindow { durationMillis: Long! } - """ An assertion represents a programmatic validation, check, or test performed periodically against another Entity. """ @@ -6611,6 +7342,22 @@ type Assertion implements EntityWithRelationships & Entity { Edges extending from this entity grouped by direction in the lineage graph """ lineage(input: LineageInput!): EntityLineageResult + + """ + Status metadata of the assertion + """ + status: Status + + """ + The standard tags for the Assertion + """ + tags: GlobalTags + + """ + Experimental API. + For fetching extra aspects that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
} """ @@ -6626,6 +7373,16 @@ type AssertionInfo { Dataset-specific assertion information """ datasetAssertion: DatasetAssertionInfo + + """ + An optional human-readable description of the assertion + """ + description: String + + """ + URL where assertion details are available + """ + externalUrl: String } """ @@ -6702,6 +7459,11 @@ type AssertionRunEvent implements TimeSeriesAspect { """ timestampMillis: Long! + """ + The time at which the run event was last observed by the DataHub system - ie, when it was reported by external systems + """ + lastObservedMillis: Long + """ Urn of assertion which is evaluated """ @@ -6718,7 +7480,7 @@ type AssertionRunEvent implements TimeSeriesAspect { runId: String! """ - The status of the assertion run as per this timeseries event. + The status of the assertion run as per this timeseries event """ status: AssertionRunStatus! @@ -6783,6 +7545,75 @@ type AssertionResult { """ nativeResults: [StringMapEntry!] + """ + Error details, if type is ERROR + """ + error: AssertionResultError +} + +""" +An error encountered when evaluating an AssertionResult +""" +type AssertionResultError { + """ + The type of error encountered + """ + type: AssertionResultErrorType! + + """ + Additional metadata depending on the type of error + """ + properties: [StringMapEntry!] 
+} + +""" +The type of error encountered when evaluating an AssertionResult +""" +enum AssertionResultErrorType { + """ + Source is unreachable + """ + SOURCE_CONNECTION_ERROR + + """ + Source query failed to execute + """ + SOURCE_QUERY_FAILED + + """ + Invalid parameters were detected + """ + INVALID_PARAMETERS + + """ + Insufficient data to evaluate assertion + """ + INSUFFICIENT_DATA + + """ + Event type not supported by the specified source + """ + INVALID_SOURCE_TYPE + + """ + Platform not supported + """ + UNSUPPORTED_PLATFORM + + """ + Error while executing a custom SQL assertion + """ + CUSTOM_SQL_ERROR + + """ + Error while executing a field assertion + """ + FIELD_ASSERTION_ERROR + + """ + Unknown error + """ + UNKNOWN_ERROR } type BatchSpec { @@ -6811,6 +7642,11 @@ type BatchSpec { The result type of an assertion, success or failure. """ enum AssertionResultType { + """ + The assertion has not yet been fully evaluated. + """ + INIT + """ The assertion succeeded. """ @@ -6820,6 +7656,11 @@ enum AssertionResultType { The assertion failed. """ FAILURE + + """ + The assertion errored. + """ + ERROR } """ @@ -6946,6 +7787,16 @@ enum AssertionStdOperator { """ EQUAL_TO + """ + Value being asserted is not equal to value + """ + NOT_EQUAL_TO + + """ + Value being asserted is null + """ + NULL + """ Value being asserted is not null """ @@ -6981,6 +7832,16 @@ enum AssertionStdOperator { """ NOT_IN + """ + Value being asserted is true + """ + IS_TRUE + + """ + Value being asserted is false + """ + IS_FALSE + """ Other """ @@ -7027,10 +7888,29 @@ type AssertionStdParameter { The type of an AssertionStdParameter """ enum AssertionStdParameterType { + """ + A string value + """ STRING + + """ + A numeric value + """ NUMBER + + """ + A list of values. When used, the value should be formatted as a serialized JSON array. + """ LIST + + """ + A set of values. When used, the value should be formatted as a serialized JSON array. 
+ """ SET + + """ + A value of unknown type + """ UNKNOWN } @@ -7060,10 +7940,38 @@ enum DatasetAssertionScope { } """ -The top-level assertion type. Currently single Dataset assertions are the only type supported. +The top-level assertion type. """ enum AssertionType { + """ + A single-dataset assertion. + """ DATASET + """ + An assertion which indicates when a particular operation should occur to an asset. + """ + FRESHNESS + """ + An assertion which indicates how much data should be available for a particular asset. + """ + VOLUME + """ + A raw SQL-statement based assertion. + """ + SQL + """ + A structured assertion targeting a specific column or field of the Dataset. + """ + FIELD + """ + A schema or structural assertion. + """ + DATA_SCHEMA + + """ + A custom assertion. + """ + CUSTOM } """ @@ -7085,6 +7993,11 @@ type AssertionRunEventsResult { """ succeeded: Int! + """ + The number of errored run events + """ + errored: Int! + """ The run events themselves """ @@ -7300,6 +8213,11 @@ type Deprecation { The user who will be credited for modifying this deprecation content """ actor: String + + """ + The hydrated user who will be credited for modifying this deprecation content + """ + actorEntity: Entity } """ @@ -8347,6 +9265,11 @@ input ListPoliciesInput { Optional search query """ query: String + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters) + """ + orFilters: [AndFilterInput!] } """ @@ -8717,6 +9640,27 @@ type MLModel implements EntityWithRelationships & Entity & BrowsableEntity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -8828,6 +9772,27 @@ type MLModelGroup implements EntityWithRelationships & Entity & BrowsableEntity Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLModelGroupProperties { @@ -8952,6 +9917,27 @@ type MLFeature implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLHyperParam { @@ -9121,6 +10107,27 @@ type MLPrimaryKey implements EntityWithRelationships & Entity { Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLPrimaryKeyProperties { @@ -9248,6 +10255,27 @@ type MLFeatureTable implements EntityWithRelationships & Entity & BrowsableEntit Whether or not this entity exists on DataHub """ exists: Boolean + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } type MLFeatureTableEditableProperties { @@ -9556,6 +10584,22 @@ enum CostType { ORG_COST_TYPE } + +""" +Audit stamp containing a resolved actor +""" +type ResolvedAuditStamp { + """ + When the audited action took place + """ + time: Long! + + """ + Who performed the audited action + """ + actor: CorpUser +} + type SubTypes { """ The sub-types that this entity implements. e.g. Datasets that are views will implement the "view" subtype @@ -9623,6 +10667,27 @@ type Domain implements Entity { Edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] 
+ + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -9870,6 +10935,11 @@ enum HealthStatusType { Assertions status """ ASSERTIONS + + """ + Incidents status + """ + INCIDENTS } """ @@ -10118,6 +11188,12 @@ type DataHubRole implements Entity { The description of the Role """ description: String! + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] } """ @@ -10201,9 +11277,29 @@ input CreatePostInput { } """ -Input provided for filling in a post content +Input provided when creating a Post """ -input UpdatePostContentInput { +input UpdatePostInput { + """ + The urn of the post to edit or update + """ + urn: String!, + + """ + The type of post + """ + postType: PostType! + + """ + The content of the post + """ + content: UpdatePostContentInput! +} + +""" +Input provided for filling in a post content +""" +input UpdatePostContentInput { """ The type of post content """ @@ -10669,6 +11765,11 @@ enum QuerySource { The query was provided manually, e.g. from the UI. """ MANUAL + + """ + The query was extracted by the system, e.g. from a dashboard. + """ + SYSTEM } """ @@ -10719,6 +11820,11 @@ type QueryProperties { An Audit Stamp corresponding to the update of this resource """ lastModified: AuditStamp! + + """ + The asset that this query originated from, e.g. a View, a dbt Model, etc. 
+ """ + origin: Entity } """ @@ -10759,6 +11865,11 @@ type QueryEntity implements Entity { Granular API for querying edges extending from this entity """ relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Platform from which the Query was detected + """ + platform: DataPlatform } """ @@ -10974,6 +12085,27 @@ type DataProduct implements Entity { Tags used for searching Data Product """ tags: GlobalTags + + """ + Experimental API. + For fetching extra entities that do not have custom UI code yet + """ + aspects(input: AspectParams): [RawAspect!] + + """ + Structured properties about this asset + """ + structuredProperties: StructuredProperties + + """ + The forms associated with the Dataset + """ + forms: Forms + + """ + Privileges given to a user relevant to this entity + """ + privileges: EntityPrivileges } """ @@ -11044,6 +12176,10 @@ input CreateDataProductInput { The primary key of the Domain """ domainUrn: String! + """ + An optional id for the new data product + """ + id: String } """ @@ -11061,6 +12197,7 @@ input CreateDataProductPropertiesInput { description: String } + """ Input properties required for update a DataProduct """ @@ -11224,4 +12361,388 @@ input UpdateOwnershipTypeInput { The description of the Custom Ownership Type """ description: String -} \ No newline at end of file +} + +""" +A standardized type of a user +""" +type DataHubPersona { + """ + The urn of the persona type + """ + urn: String! +} + +""" +Describes a generic filter on a dataset +""" +type DatasetFilter { + """ + Type of partition + """ + type: DatasetFilterType! + + """ + The raw query if using a SQL FilterType + """ + sql: String +} + +""" +Type of partition +""" +enum DatasetFilterType { + """ + Use a SQL string to apply the filter + """ + SQL +} + + +""" +Input required to create or update a DatasetFilter +""" +input DatasetFilterInput { + """ + Type of partition + """ + type: DatasetFilterType! 
+ + """ + The raw query if using a SQL FilterType + """ + sql: String +} + +""" +An entity type registered in DataHub +""" +type EntityTypeEntity implements Entity { + """ + A primary key associated with the Query + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Info about this type including its name + """ + info: EntityTypeInfo! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Properties about an individual entity type +""" +type EntityTypeInfo { + """ + The standard entity type + """ + type: EntityType! + + """ + The fully qualified name of the entity type. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this type + """ + displayName: String + + """ + The description of this type + """ + description: String +} + +""" +A restricted entity that the user does not have full permissions to view. +This entity type does not relate to an entity type in the database. +""" +type Restricted implements Entity & EntityWithRelationships { + """ + The primary key of the restricted entity + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + Edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + Edges extending from this entity grouped by direction in the lineage graph + """ + lineage(input: LineageInput!): EntityLineageResult +} + + +""" +A Business Attribute, or a logical schema Field +""" +type BusinessAttribute implements Entity { + """ + The primary key of the Data Product + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! 
+ + """ + Properties about a Business Attribute + """ + properties: BusinessAttributeInfo + + """ + Ownership metadata of the Business Attribute + """ + ownership: Ownership + + """ + References to internal resources related to Business Attribute + """ + institutionalMemory: InstitutionalMemory + + """ + Status of the Dataset + """ + status: Status + + """ + List of relationships between the source Entity and some destination entities with a given types + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Business Attribute type +""" + +type BusinessAttributeInfo { + + """ + name of the business attribute + """ + name: String! + + """ + description of business attribute + """ + description: String + + """ + Tags associated with the business attribute + """ + tags: GlobalTags + + """ + Glossary terms associated with the business attribute + """ + glossaryTerms: GlossaryTerms + + """ + Platform independent field type of the field + """ + type: SchemaFieldDataType + + """ + A list of platform specific metadata tuples + """ + customProperties: [CustomPropertiesEntry!] + + """ + An AuditStamp corresponding to the creation of this chart + """ + created: AuditStamp! + + """ + An AuditStamp corresponding to the modification of this chart + """ + lastModified: AuditStamp! + + """ + An optional AuditStamp corresponding to the deletion of this chart + """ + deleted: AuditStamp +} + +""" +Input required for creating a BusinessAttribute. +""" +input CreateBusinessAttributeInput { + """ + Optional! A custom id to use as the primary key identifier. If not provided, a random UUID will be generated as the id. + """ + id: String + + """ + name of the business attribute + """ + name: String! 
+ + """ + description of business attribute + """ + description: String + + """ + Platform independent field type of the field + """ + type: SchemaFieldDataType + +} + +input BusinessAttributeInfoInput { + """ + name of the business attribute + """ + name: String! + + """ + description of business attribute + """ + description: String + + """ + Platform independent field type of the field + """ + type: SchemaFieldDataType +} + +""" +Input required to update Business Attribute +""" +input UpdateBusinessAttributeInput { + """ + name of the business attribute + """ + name: String + + """ + business attribute description + """ + description: String + + """ + type + """ + type: SchemaFieldDataType +} + +""" +Input required to attach Business Attribute +If businessAttributeUrn is null, then it will remove the business attribute from the resource +""" +input AddBusinessAttributeInput { + """ + The urn of the business attribute to add + """ + businessAttributeUrn: String! + + """ + resource urns to add the business attribute to + """ + resourceUrn: [ResourceRefInput!]! +} + +""" +Business attributes attached to the metadata +""" +type BusinessAttributes { + """ + Business Attribute attached to the Metadata Entity + """ + businessAttribute: BusinessAttributeAssociation +} + +""" +Input required to attach business attribute to an entity +""" +type BusinessAttributeAssociation { + """ + Business Attribute itself + """ + businessAttribute: BusinessAttribute! + + """ + Reference back to the associated urn for tracking purposes e.g. when sibling nodes are merged together + """ + associatedUrn: String! 
+} + +""" +Input provided when listing Business Attribute +""" +input ListBusinessAttributesInput { + """ + The starting offset of the result set returned + """ + start: Int + + """ + The maximum number of Business Attributes to be returned in the result set + """ + count: Int + + """ + Optional search query + """ + query: String +} + +""" +The result obtained when listing Business Attribute +""" +type ListBusinessAttributesResult { + """ + The starting offset of the result set returned + """ + start: Int! + + """ + The number of Business Attributes in the returned result set + """ + count: Int! + + """ + The total number of Business Attributes in the result set + """ + total: Int! + + """ + The Business Attributes + """ + businessAttributes: [BusinessAttribute!]! +} + +""" +A cron schedule +""" +type CronSchedule { + """ + A cron-formatted execution interval, as a cron string, e.g. 1 * * * * + """ + cron: String! + + """ + Timezone in which the cron interval applies, e.g. America/Los_Angeles + """ + timezone: String! +} diff --git a/datahub-graphql-core/src/main/resources/forms.graphql b/datahub-graphql-core/src/main/resources/forms.graphql new file mode 100644 index 00000000000000..4a4e2705095968 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/forms.graphql @@ -0,0 +1,596 @@ +extend type Mutation { + """ + Remove a form from a given list of entities. + """ + batchRemoveForm(input: BatchRemoveFormInput!): Boolean! + + """ + Create a new form based on the input + """ + createForm(input: CreateFormInput!): Form! + + """ + Delete a given form + """ + deleteForm(input: DeleteFormInput!): Boolean! + + """ + Update an existing form based on the input + """ + updateForm(input: UpdateFormInput!): Form! +} + +""" +Requirements forms that are assigned to an entity. +""" +type Forms { + """ + Forms that are still incomplete. + """ + incompleteForms: [FormAssociation!]! + + """ + Forms that have been completed. + """ + completedForms: [FormAssociation!]! 
+
+  """
+  Verifications that have been applied to the entity via completed forms.
+  """
+  verifications: [FormVerificationAssociation!]!
+}
+
+type FormAssociation {
+  """
+  The form related to the associated urn
+  """
+  form: Form!
+
+  """
+  Reference back to the urn with the form on it for tracking purposes e.g. when sibling nodes are merged together
+  """
+  associatedUrn: String!
+
+  """
+  The prompts that still need to be completed for this form
+  """
+  incompletePrompts: [FormPromptAssociation!]
+
+  """
+  The prompts that are already completed for this form
+  """
+  completedPrompts: [FormPromptAssociation!]
+}
+
+"""
+Verification object that has been applied to the entity via a completed form.
+"""
+type FormVerificationAssociation {
+  """
+  The form related to the associated urn
+  """
+  form: Form!
+
+  """
+  When this verification was applied to this entity
+  """
+  lastModified: ResolvedAuditStamp
+}
+
+"""
+An association between a form prompt and the entity it has been submitted against
+"""
+type FormPromptAssociation {
+  """
+  The unique id of the form prompt
+  """
+  id: String!
+
+  """
+  When and by whom this form prompt has last been modified
+  """
+  lastModified: ResolvedAuditStamp!
+
+  """
+  Optional information about the field-level prompt associations.
+  """
+  fieldAssociations: FormPromptFieldAssociations
+}
+
+"""
+Information about the field-level prompt associations.
+"""
+type FormPromptFieldAssociations {
+  """
+  If this form prompt is for fields, this will contain a list of completed associations per field
+  """
+  completedFieldPrompts: [FieldFormPromptAssociation!]
+
+  """
+  If this form prompt is for fields, this will contain a list of incomplete associations per field
+  """
+  incompleteFieldPrompts: [FieldFormPromptAssociation!]
+}
+
+"""
+An association for field-level form prompts
+"""
+type FieldFormPromptAssociation {
+  """
+  The schema field path
+  """
+  fieldPath: String!
+ + """ + When and by whom this form field-level prompt has last been modified + """ + lastModified: ResolvedAuditStamp! +} + +""" +A form that helps with filling out metadata on an entity +""" +type Form implements Entity { + """ + A primary key associated with the Form + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Information about this form + """ + info: FormInfo! + + """ + Ownership metadata of the form + """ + ownership: Ownership + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +The type of a form. This is optional on a form entity +""" +enum FormType { + """ + This form is used for "verifying" entities as a state for governance and compliance + """ + VERIFICATION + + """ + This form is used to help with filling out metadata on entities + """ + COMPLETION +} + +""" +Properties about an individual Form +""" +type FormInfo { + """ + The name of this form + """ + name: String! + + """ + The description of this form + """ + description: String + + """ + The type of this form + """ + type: FormType! + + """ + The prompt for this form + """ + prompts: [FormPrompt!]! + + """ + The actors that are assigned to complete the forms for the associated entities. + """ + actors: FormActorAssignment! +} + +""" +A prompt shown to the user to collect metadata about an entity +""" +type FormPrompt { + """ + The ID of this prompt. This will be globally unique. + """ + id: String! + + """ + The title of this prompt + """ + title: String! + + """ + The urn of the parent form that this prompt is part of + """ + formUrn: String! + + """ + The description of this prompt + """ + description: String + + """ + The description of this prompt + """ + type: FormPromptType! + + """ + Whether the prompt is required for the form to be considered completed. + """ + required: Boolean! 
+
+  """
+  The params for this prompt if type is STRUCTURED_PROPERTY
+  """
+  structuredPropertyParams: StructuredPropertyParams
+}
+
+"""
+Enum of all form prompt types
+"""
+enum FormPromptType {
+  """
+  A structured property form prompt type.
+  """
+  STRUCTURED_PROPERTY
+  """
+  A schema field-level structured property form prompt type.
+  """
+  FIELDS_STRUCTURED_PROPERTY
+}
+
+"""
+The params for a form prompt of type STRUCTURED_PROPERTY
+"""
+type StructuredPropertyParams {
+  """
+  The structured property required for the prompt on this entity
+  """
+  structuredProperty: StructuredPropertyEntity!
+}
+
+"""
+Input for responding to a singular prompt in a form
+"""
+input SubmitFormPromptInput {
+  """
+  The unique ID of the prompt this input is responding to
+  """
+  promptId: String!
+
+  """
+  The urn of the form that this prompt is a part of
+  """
+  formUrn: String!
+
+  """
+  The type of prompt that this input is responding to
+  """
+  type: FormPromptType!
+
+  """
+  The fieldPath on a schema field that this prompt submission is associated with.
+  This should be provided when the prompt is type FIELDS_STRUCTURED_PROPERTY
+  """
+  fieldPath: String
+
+  """
+  The structured property required for the prompt on this entity
+  """
+  structuredPropertyParams: StructuredPropertyInputParams
+}
+
+
+"""
+Input for collecting structured property values to apply to entities
+"""
+input PropertyValueInput {
+  """
+  The string value for this structured property
+  """
+  stringValue: String
+
+  """
+  The number value for this structured property
+  """
+  numberValue: Float
+}
+
+"""
+Input params identifying a structured property and the values to apply to an entity
+"""
+input StructuredPropertyInputParams {
+  """
+  The urn of the structured property being applied to an entity
+  """
+  structuredPropertyUrn: String!
+
+  """
+  The list of values you want to apply on this structured property to an entity
+  """
+  values: [PropertyValueInput!]!
+} + +""" +Input for batch assigning a form to different entities +""" +input BatchAssignFormInput { + """ + The urn of the form being assigned to entities + """ + formUrn: String! + + """ + The entities that this form is being assigned to + """ + entityUrns: [String!]! +} + +""" +Input for batch assigning a form to different entities +""" +input CreateDynamicFormAssignmentInput { + """ + The urn of the form being assigned to entities that match some criteria + """ + formUrn: String! + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters). + Entities that match this filter will have this form applied to them. + Currently, we only support a set of fields to filter on and they are: + (1) platform (2) subType (3) container (4) _entityType (5) domain + """ + orFilters: [AndFilterInput!]! +} + +type FormActorAssignment { + """ + Whether the form should be completed by owners of the assets which the form is applied to. + """ + owners: Boolean! + + """ + Urns of the users that the form is assigned to. If null, then no users are specifically targeted. + """ + users: [CorpUser!] + + """ + Groups that the form is assigned to. If null, then no groups are specifically targeted. + """ + groups: [CorpGroup!] + + """ + Whether or not the current actor is universally assigned to this form, either by user or by group. + Note that this does not take into account entity ownership based assignment. + """ + isAssignedToMe: Boolean! +} + +""" +Input for verifying forms on entities +""" +input VerifyFormInput { + """ + The urn of the form being verified on an entity + """ + formUrn: String! + + """ + The urn of the entity that is having a form verified on it + """ + entityUrn: String! +} + +""" +Input for batch removing a form from different entities +""" +input BatchRemoveFormInput { + """ + The urn of the form being removed from entities + """ + formUrn: String! 
+
+  """
+  The entities that this form is being removed from
+  """
+  entityUrns: [String!]!
+}
+
+"""
+Input for creating a new form
+"""
+input CreateFormInput {
+  """
+  Advanced: Optionally provide an ID to create a form urn from
+  """
+  id: String
+
+  """
+  The name of the form being created
+  """
+  name: String!
+
+  """
+  The optional description of the form being created
+  """
+  description: String
+
+  """
+  The type of this form, whether it's verification or completion. Default is completion.
+  """
+  type: FormType
+
+  """
+  The prompts that make up this form
+  """
+  prompts: [CreatePromptInput!]
+
+  """
+  Information on how this form should be assigned to users/groups
+  """
+  actors: FormActorAssignmentInput
+}
+
+"""
+Input for creating form prompts
+"""
+input CreatePromptInput {
+  """
+  Advanced: Optionally provide an ID to this prompt. All prompt IDs must be globally unique.
+  """
+  id: String
+
+  """
+  The title of the prompt
+  """
+  title: String!
+
+  """
+  The optional description of the prompt
+  """
+  description: String
+
+  """
+  The type of the prompt.
+  """
+  type: FormPromptType!
+
+  """
+  The params required if this prompt type is STRUCTURED_PROPERTY or FIELDS_STRUCTURED_PROPERTY
+  """
+  structuredPropertyParams: StructuredPropertyParamsInput
+
+  """
+  Whether this prompt will be required or not. Default is false.
+  """
+  required: Boolean
+
+}
+
+"""
+Input for assigning a form to actors
+"""
+input FormActorAssignmentInput {
+  """
+  Whether this form will be applied to owners of associated entities or not. Default is true.
+  """
+  owners: Boolean
+
+  """
+  The optional list of user urns to assign this form to
+  """
+  users: [String!]
+
+  """
+  The optional list of group urns to assign this form to
+  """
+  groups: [String!]
+} + +""" +Input for a structured property type prompt +""" +input StructuredPropertyParamsInput { + """ + The urn of the structured property for a given form prompt + """ + urn: String! +} + +""" +Input for updating a form +""" +input UpdateFormInput { + """ + The urn of the form being updated + """ + urn: String! + + """ + The new name of the form + """ + name: String + + """ + The new description of the form + """ + description: String + + """ + The new type of the form + """ + type: FormType + + """ + The new prompts being added to this form + """ + promptsToAdd: [CreatePromptInput!] + + """ + The IDs of the prompts to remove from this form + """ + promptsToRemove: [String!] + + """ + Information on how this form should be assigned to users/groups + """ + actors: FormActorAssignmentUpdateInput +} + +""" +Update input for assigning a form to actors +""" +input FormActorAssignmentUpdateInput { + """ + Whether this form will be applied to owners of associated entities or not. Default is true. + """ + owners: Boolean + + """ + The optional list of user urns to assign this form to + """ + usersToAdd: [String!] + + """ + The users being removed from being assigned to this form + """ + usersToRemove: [String!] + + """ + The optional list of group urns to assign this form to + """ + groupsToAdd: [String!] + + """ + The groups being removed from being assigned to this form + """ + groupsToRemove: [String!] +} + +""" +Input for deleting a form +""" +input DeleteFormInput { + """ + The urn of the form that is being deleted + """ + urn: String! 
+} diff --git a/datahub-graphql-core/src/main/resources/incident.graphql b/datahub-graphql-core/src/main/resources/incident.graphql new file mode 100644 index 00000000000000..c2938543ed9494 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/incident.graphql @@ -0,0 +1,380 @@ +extend type Mutation { + """ + Create a new incident for a resource (asset) + """ + raiseIncident( + """ + Input required to create a new incident + """ + input: RaiseIncidentInput!): String + + """ + Update an existing incident for a resource (asset) + """ + updateIncidentStatus( + """ + The urn for an existing incident + """ + urn: String! + + """ + Input required to update the state of an existing incident + """ + input: UpdateIncidentStatusInput!): Boolean +} + +""" +A list of Incidents Associated with an Entity +""" +type EntityIncidentsResult { + """ + The starting offset of the result set returned + """ + start: Int! + + """ + The number of assertions in the returned result set + """ + count: Int! + + """ + The total number of assertions in the result set + """ + total: Int! + + """ + The incidents themselves + """ + incidents: [Incident!]! +} + +""" +An incident represents an active issue on a data asset. +""" +type Incident implements Entity { + """ + The primary key of the Incident + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + The type of incident + """ + incidentType: IncidentType! + + """ + A custom type of incident. Present only if type is 'CUSTOM' + """ + customType: String + + """ + An optional title associated with the incident + """ + title: String + + """ + An optional description associated with the incident + """ + description: String + + """ + The status of an incident + """ + status: IncidentStatus! + + """ + Optional priority of the incident. Lower value indicates higher priority. + """ + priority: Int + + """ + The entity that the incident is associated with. + """ + entity: Entity! 
+ + """ + The source of the incident, i.e. how it was generated + """ + source: IncidentSource + + """ + The time at which the incident was initially created + """ + created: AuditStamp! + + """ + The standard tags for the Incident + """ + tags: GlobalTags + + """ + List of relationships between the source Entity and some destination entities with a given types + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +The state of an incident. +""" +enum IncidentState { + """ + The incident is ongoing, or active. + """ + ACTIVE + """ + The incident is resolved. + """ + RESOLVED +} + +""" +A specific type of incident +""" +enum IncidentType { + """ + A Freshness Assertion has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + FRESHNESS + + """ + A Volume Assertion has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + VOLUME + + """ + A Field Assertion has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + FIELD + + """ + A SQL Assertion has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + SQL + + """ + A Schema has failed, triggering the incident. + Raised on assets where assertions are configured to generate incidents. + """ + DATA_SCHEMA + + """ + An operational incident, e.g. failure to materialize a dataset, or failure to execute a task / pipeline. + """ + OPERATIONAL + + """ + A custom type of incident + """ + CUSTOM +} + + +""" +Details about the status of an asset incident +""" +type IncidentStatus { + """ + The state of the incident + """ + state: IncidentState! + """ + An optional message associated with the status + """ + message: String + """ + The time that the status last changed + """ + lastUpdated: AuditStamp! 
+} + +""" +The source type of an incident, implying how it was created. +""" +enum IncidentSourceType { + """ + The incident was created manually, from either the API or the UI. + """ + MANUAL + + """ + An assertion has failed, triggering the incident. + """ + ASSERTION_FAILURE +} + +""" +Details about the source of an incident, e.g. how it was created. +""" +type IncidentSource { + """ + The type of the incident source + """ + type: IncidentSourceType! + + """ + The source of the incident. If the source type is ASSERTION_FAILURE, this will have the assertion that generated the incident. + """ + source: Entity +} + +""" +Input required to create a new incident in the 'Active' state. +""" +input RaiseIncidentInput { + """ + The type of incident + """ + type: IncidentType! + """ + A custom type of incident. Present only if type is 'CUSTOM' + """ + customType: String + """ + An optional title associated with the incident + """ + title: String + """ + An optional description associated with the incident + """ + description: String + """ + The resource (dataset, dashboard, chart, dataFlow, etc) that the incident is associated with. + """ + resourceUrn: String! + """ + The source of the incident, i.e. how it was generated + """ + source: IncidentSourceInput + """ + An optional priority for the incident. Lower value indicates a higher priority. + """ + priority: Int +} + +""" +Input required to create an incident source +""" +input IncidentSourceInput { + """ + The type of the incident source + """ + type: IncidentSourceType! +} + +""" +Input required to update status of an existing incident +""" +input UpdateIncidentStatusInput { + """ + The new state of the incident + """ + state: IncidentState! + """ + An optional message associated with the new state + """ + message: String +} + +extend type Dataset { + """ + Incidents associated with the Dataset + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. 
+ """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type DataJob { + """ + Incidents associated with the DataJob + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type DataFlow { + """ + Incidents associated with the DataFlow + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type Dashboard { + """ + Incidents associated with the Dashboard + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. + """ + count: Int): EntityIncidentsResult +} + +extend type Chart { + """ + Incidents associated with the Chart + """ + incidents( + """ + Optional incident state to filter by, defaults to any state. + """ + state: IncidentState, + """ + Optional start offset, defaults to 0. + """ + start: Int, + """ + Optional start offset, defaults to 20. 
+ """ + count: Int): EntityIncidentsResult +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/ingestion.graphql b/datahub-graphql-core/src/main/resources/ingestion.graphql index 21f9fb2633119b..77327ae6d4db14 100644 --- a/datahub-graphql-core/src/main/resources/ingestion.graphql +++ b/datahub-graphql-core/src/main/resources/ingestion.graphql @@ -36,6 +36,11 @@ extend type Mutation { """ createSecret(input: CreateSecretInput!): String + """ + Update a Secret + """ + updateSecret(input: UpdateSecretInput!): String + """ Delete a Secret """ @@ -112,6 +117,11 @@ type ExecutionRequestInput { The time at which the request was created """ requestedAt: Long! + + """ + Urn of the actor who created this execution request + """ + actorUrn: String } """ @@ -560,6 +570,31 @@ input CreateSecretInput { description: String } +""" +Input arguments for updating a Secret +""" +input UpdateSecretInput { + """ + The primary key of the Secret to update + """ + urn: String! + + """ + The name of the secret for reference in ingestion recipes + """ + name: String! + + """ + The value of the secret, to be encrypted and stored + """ + value: String! + + """ + An optional description for the secret + """ + description: String +} + """ Input arguments for retrieving the plaintext values of a set of secrets """ diff --git a/datahub-graphql-core/src/main/resources/properties.graphql b/datahub-graphql-core/src/main/resources/properties.graphql new file mode 100644 index 00000000000000..dfe84686456814 --- /dev/null +++ b/datahub-graphql-core/src/main/resources/properties.graphql @@ -0,0 +1,449 @@ +extend type Mutation { + """ + Upsert structured properties onto a given asset + """ + upsertStructuredProperties(input: UpsertStructuredPropertiesInput!): StructuredProperties! + + """ + Upsert structured properties onto a given asset + """ + removeStructuredProperties(input: RemoveStructuredPropertiesInput!): StructuredProperties! 
+ + """ + Create a new structured property + """ + createStructuredProperty(input: CreateStructuredPropertyInput!): StructuredPropertyEntity! + + """ + Update an existing structured property + """ + updateStructuredProperty(input: UpdateStructuredPropertyInput!): StructuredPropertyEntity! +} + +""" +A structured property that can be shared between different entities +""" +type StructuredPropertyEntity implements Entity { + """ + A primary key associated with the structured property + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Definition of this structured property including its name + """ + definition: StructuredPropertyDefinition! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +Properties about an individual Query +""" +type StructuredPropertyDefinition { + """ + The fully qualified name of the property. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this structured property + """ + displayName: String + + """ + The description of this property + """ + description: String + + """ + The cardinality of a Structured Property determining whether one or multiple values + can be applied to the entity from this property. + """ + cardinality: PropertyCardinality + + """ + A list of allowed values that the property is allowed to take. + """ + allowedValues: [AllowedValue!] + + """ + The type of this structured property + """ + valueType: DataTypeEntity! + + """ + Allows for type specialization of the valueType to be more specific about which + entity types are allowed, for example. + """ + typeQualifier: TypeQualifier + + """ + Entity types that this structured property can be applied to + """ + entityTypes: [EntityTypeEntity!]! + + """ + Whether or not this structured property is immutable + """ + immutable: Boolean! 
+} + +""" +An entry for an allowed value for a structured property +""" +type AllowedValue { + """ + The allowed value + """ + value: PropertyValue! + + """ + The description of this allowed value + """ + description: String +} + +""" +The cardinality of a Structured Property determining whether one or multiple values +can be applied to the entity from this property. +""" +enum PropertyCardinality { + """ + Only one value of this property can applied to an entity + """ + SINGLE + + """ + Multiple values of this property can applied to an entity + """ + MULTIPLE +} + +""" +Allows for type specialization of the valueType to be more specific about which +entity types are allowed, for example. +""" +type TypeQualifier { + """ + The list of allowed entity types + """ + allowedTypes: [EntityTypeEntity!] +} + +""" +String property value +""" +type StringValue { + """ + The value of a string type property + """ + stringValue: String! +} + +""" +Numeric property value +""" +type NumberValue { + """ + The value of a number type property + """ + numberValue: Float! +} + +""" +The value of a property +""" +union PropertyValue = StringValue | NumberValue + +""" +An entry in an structured properties list represented as a tuple +""" +type StructuredPropertiesEntry { + """ + The key of the map entry + """ + structuredProperty: StructuredPropertyEntity! + + """ + The values of the structured property for this entity + """ + values: [PropertyValue]! + + """ + The optional entities associated with the values if the values are entity urns + """ + valueEntities: [Entity] +} + +""" +Input for upserting structured properties on a given asset +""" +input UpsertStructuredPropertiesInput { + """ + The urn of the asset that we are updating + """ + assetUrn: String! + + """ + The list of structured properties you want to upsert on this asset + """ + structuredPropertyInputParams: [StructuredPropertyInputParams!]! 
+} + +""" +Input for removing structured properties on a given asset +""" +input RemoveStructuredPropertiesInput { + """ + The urn of the asset that we are removing properties from + """ + assetUrn: String! + + """ + The list of structured properties you want to remove from this asset + """ + structuredPropertyUrns: [String!]! +} + +""" +A data type registered in DataHub +""" +type DataTypeEntity implements Entity { + """ + A primary key associated with the Query + """ + urn: String! + + """ + A standard Entity Type + """ + type: EntityType! + + """ + Info about this type including its name + """ + info: DataTypeInfo! + + """ + Granular API for querying edges extending from this entity + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult +} + +""" +A well-supported, standard DataHub Data Type. +""" +enum StdDataType { + """ + String data type + """ + STRING + + """ + Number data type + """ + NUMBER + + """ + Urn data type + """ + URN + + """ + Rich text data type. Right now this is markdown only. + """ + RICH_TEXT + + """ + Date data type in format YYYY-MM-DD + """ + DATE + + """ + Any other data type - refer to a provided data type urn. + """ + OTHER +} + +""" +Properties about an individual data type +""" +type DataTypeInfo { + """ + The standard data type + """ + type: StdDataType! + + """ + The fully qualified name of the type. This includes its namespace + """ + qualifiedName: String! + + """ + The display name of this type + """ + displayName: String + + """ + The description of this type + """ + description: String +} + +""" +Input for creating a new structured property entity +""" +input CreateStructuredPropertyInput { + """ + (Advanced) An optional unique ID to use when creating the urn of this entity + """ + id: String + + """ + The unique fully qualified name of this structured property, dot delimited. + """ + qualifiedName: String! 
+ + """ + The optional display name for this property + """ + displayName: String + + """ + The optional description for this property + """ + description: String + + """ + Whether the property will be mutable once it is applied or not. Default is false. + """ + immutable: Boolean + + """ + The urn of the value type that this structured property accepts. + For example: urn:li:dataType:datahub.string or urn:li:dataType:datahub.date + """ + valueType: String! + + """ + The optional input for specifying specific entity types as values + """ + typeQualifier: TypeQualifierInput + + """ + The optional input for specifying a list of allowed values + """ + allowedValues: [AllowedValueInput!] + + """ + The optional input for specifying if one or multiple values can be applied. + Default is one value (single cardinality) + """ + cardinality: PropertyCardinality + + """ + The list of entity types that this property can be applied to. + For example: ["urn:li:entityType:datahub.dataset"] + """ + entityTypes: [String!]! +} + +""" +Input for specifying specific entity types as values +""" +input TypeQualifierInput { + """ + The list of allowed entity types as urns (ie. ["urn:li:entityType:datahub.corpuser"]) + """ + allowedTypes: [String!] +} + +""" +An input entry for an allowed value for a structured property +""" +input AllowedValueInput { + """ + The allowed string value if the value is of type string + Either this or numberValue is required. + """ + stringValue: String + + """ + The allowed number value if the value is of type number. + Either this or stringValue is required. + """ + numberValue: Float + + """ + The description of this allowed value + """ + description: String +} + +""" +Input for updating an existing structured property entity +""" +input UpdateStructuredPropertyInput { + """ + The urn of the structured property being updated + """ + urn: String! 
+ + """ + The optional display name for this property + """ + displayName: String + + """ + The optional description for this property + """ + description: String + + """ + Whether the property will be mutable once it is applied or not. Default is false. + """ + immutable: Boolean + + """ + The optional input for specifying specific entity types as values + """ + typeQualifier: UpdateTypeQualifierInput + + """ + Append to the list of allowed values for this property. + For backwards compatibility, this is append only. + """ + newAllowedValues: [AllowedValueInput!] + + """ + Set to true if you want to change the cardinality of this structured property + to multiple. Cannot change from multiple to single for backwards compatibility reasons. + """ + setCardinalityAsMultiple: Boolean + + """ + Append to the list of entity types that this property can be applied to. + For backwards compatibility, this is append only. + """ + newEntityTypes: [String!] +} + +""" +Input for updating specifying specific entity types as values +""" +input UpdateTypeQualifierInput { + """ + Append to the list of allowed entity types as urns for this property (ie. ["urn:li:entityType:datahub.corpuser"]) + For backwards compatibility, this is append only. + """ + newAllowedTypes: [String!] 
+} diff --git a/datahub-graphql-core/src/main/resources/recommendation.graphql b/datahub-graphql-core/src/main/resources/recommendation.graphql index 439b22142b0cb8..d329d71fbef694 100644 --- a/datahub-graphql-core/src/main/resources/recommendation.graphql +++ b/datahub-graphql-core/src/main/resources/recommendation.graphql @@ -23,6 +23,11 @@ input ListRecommendationsInput { Max number of modules to return """ limit: Int + + """ + Optional - A View to apply when generating results + """ + viewUrn: String } """ diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index e0cde5a2db9f99..09a7217073527b 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -143,6 +143,62 @@ input SearchFlags { Whether to request for search suggestions on the _entityName virtualized field """ getSuggestions: Boolean + + """ + Additional grouping specifications to apply to the search results + Grouping specifications will control how search results are grouped together + in the response. This is currently being used to group schema fields (columns) + as datasets, and in the future will be used to group other entities as well. + Note: This is an experimental feature and is subject to change. + """ + groupingSpec: GroupingSpec + + """ + Whether to include soft deleted entities + """ + includeSoftDeleted: Boolean + + """ + Whether to include restricted entities + """ + includeRestricted: Boolean +} + +""" +Flags to control lineage behavior +""" +input LineageFlags { + """ + Limits the number of results explored per hop, still gets all edges each time a hop happens + """ + entitiesExploredPerHopLimit: Int + + """ + An optional starting time to filter on + """ + startTimeMillis: Long + """ + An optional ending time to filter on + """ + endTimeMillis: Long + + """ + Map of entity types to platforms to ignore when counting hops during graph walk. 
Note: this can potentially cause + a large amount of additional hops to occur and should be used with caution. + """ + ignoreAsHops: [EntityTypeToPlatforms!] +} + +input EntityTypeToPlatforms { + """ + Entity type to ignore as hops, if no platform is applied applies to all entities of this type. + """ + entityType: EntityType! + + """ + List of platforms to ignore as hops, empty implies all. Must be a valid platform urn + """ + platforms: [String!] } """ @@ -278,6 +334,7 @@ input ScrollAcrossEntitiesInput { searchFlags: SearchFlags } + """ Input arguments for a search query over the results of a multi-hop graph query """ @@ -325,16 +382,21 @@ input SearchAcrossLineageInput { """ An optional starting time to filter on """ - startTimeMillis: Long + startTimeMillis: Long @deprecated(reason: "Use LineageFlags instead") """ An optional ending time to filter on """ - endTimeMillis: Long + endTimeMillis: Long @deprecated(reason: "Use LineageFlags instead") """ Flags controlling search options """ searchFlags: SearchFlags + + """ + Flags controlling the lineage query + """ + lineageFlags: LineageFlags } """ @@ -395,6 +457,11 @@ input ScrollAcrossLineageInput { Flags controlling search options """ searchFlags: SearchFlags + + """ + Flags controlling the lineage query + """ + lineageFlags: LineageFlags } """ @@ -577,7 +644,7 @@ type ScrollResults { } """ -Results returned by issueing a search across relationships query +Results returned by issuing a search across relationships query """ type SearchAcrossLineageResults { """ @@ -612,7 +679,7 @@ type SearchAcrossLineageResults { } """ -Results returned by issueing a search across relationships query using scroll API +Results returned by issuing a search across relationships query using scroll API """ type ScrollAcrossLineageResults { """ @@ -669,6 +736,27 @@ type SearchAcrossLineageResult { Degree of relationship (number of hops to get to entity) """ degree: Int! 
+ + """ + Degrees of relationship (for entities discoverable at multiple degrees) + """ + degrees: [Int!] + + """ + Marks whether or not this entity was explored further for lineage + """ + explored: Boolean! + + """ + Indicates this destination node has additional unexplored child relationships + """ + truncatedChildren: Boolean! + + """ + Whether this relationship was ignored as a hop + """ + ignoredAsHop: Boolean! + } """ @@ -1139,7 +1227,7 @@ type QuickFilter { } """ -Freshness stats for a query result. +Freshness stats for a query result. Captures whether the query was served out of a cache, what the staleness was, etc. """ type FreshnessStats { @@ -1154,7 +1242,7 @@ type FreshnessStats { In case an index was consulted, this reflects the freshness of the index """ systemFreshness: [SystemFreshness] - + } type SystemFreshness { @@ -1176,9 +1264,14 @@ Input required for browse queries """ input BrowseV2Input { """ - The browse entity type + The browse entity type - deprecated use types instead """ - type: EntityType! + type: EntityType + + """ + The browse entity type - deprecated use types instead. If not provided, all types will be used. + """ + types: [EntityType!] """ The browse path V2 - a list with each entry being part of the browse path V2 @@ -1209,6 +1302,11 @@ input BrowseV2Input { The search query string """ query: String + + """ + Flags controlling search options + """ + searchFlags: SearchFlags } """ @@ -1274,7 +1372,12 @@ input SearchSortInput { """ A criterion to sort search results on """ - sortCriterion: SortCriterion! + sortCriterion: SortCriterion @deprecated(reason: "Use sortCriteria instead") + + """ + A list of values to sort search results on + """ + sortCriteria: [SortCriterion!] } """ @@ -1299,3 +1402,36 @@ input SortCriterion { """ sortOrder: SortOrder! } + +""" +A grouping specification for search results. +""" +input GroupingSpec { + + """ + A list of grouping criteria for grouping search results. 
+ There is no implied order in the grouping criteria. + """ + groupingCriteria: [GroupingCriterion!] + +} + +""" +A single grouping criterion for grouping search results +""" +input GroupingCriterion { + + """ + The base entity type that needs to be grouped + e.g. schemaField + Omitting this field will result in all base entities being grouped into the groupingEntityType. + """ + baseEntityType: EntityType + + """ + The type of entity being grouped into + e.g. dataset, domain, etc. + """ + groupingEntityType: EntityType! + +} \ No newline at end of file diff --git a/datahub-graphql-core/src/main/resources/tests.graphql b/datahub-graphql-core/src/main/resources/tests.graphql index 9dce48ac60d834..579f4919bdc783 100644 --- a/datahub-graphql-core/src/main/resources/tests.graphql +++ b/datahub-graphql-core/src/main/resources/tests.graphql @@ -44,6 +44,7 @@ Definition of the test type TestDefinition { """ JSON-based def for the test + Deprecated! JSON representation is no longer supported. """ json: String } @@ -209,6 +210,7 @@ input UpdateTestInput { input TestDefinitionInput { """ The string representation of the Test + Deprecated! JSON representation is no longer supported. 
""" json: String } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index 606123cac926de..522e4be0ec5ec2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -8,26 +12,24 @@ import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; -import com.linkedin.metadata.models.registry.ConfigEntityRegistry; -import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; - +import com.linkedin.r2.RemoteInvocationException; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.List; - +import java.util.stream.Collectors; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.testng.Assert; public class TestUtils { - public static EntityService getMockEntityService() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() - 
.setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); - EntityService mockEntityService = Mockito.mock(EntityService.class); - Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry); - return mockEntityService; + public static EntityService getMockEntityService() { + return (EntityService) Mockito.mock(EntityService.class); } public static QueryContext getMockAllowContext() { @@ -35,34 +37,44 @@ public static QueryContext getMockAllowContext() { } public static QueryContext getMockAllowContext(String actorUrn) { - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); - Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); - Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); - - Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getActorUrn()).thenReturn(actorUrn); + + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); + when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); + when(mockAuthorizer.authorize(any())).thenReturn(result); + + when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); + Authentication authentication = + new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); + when(mockContext.getAuthentication()).thenReturn(authentication); + + OperationContext operationContext = + 
TestOperationContexts.userContextNoSearchAuthorization(mockAuthorizer, authentication); + when(mockContext.getOperationContext()).thenReturn(operationContext); + return mockContext; } public static QueryContext getMockAllowContext(String actorUrn, AuthorizationRequest request) { - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); - Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); - - Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getActorUrn()).thenReturn(actorUrn); + + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); + when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); + when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); + + Authentication authentication = + new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); + when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); + when(mockContext.getAuthentication()).thenReturn(authentication); + + OperationContext operationContext = + TestOperationContexts.userContextNoSearchAuthorization(mockAuthorizer, authentication); + when(mockContext.getOperationContext()).thenReturn(operationContext); + return mockContext; } @@ -71,80 +83,131 @@ public static QueryContext getMockDenyContext() { } public static QueryContext getMockDenyContext(String actorUrn) { - QueryContext mockContext = Mockito.mock(QueryContext.class); - 
Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); - Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); - - Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getActorUrn()).thenReturn(actorUrn); + + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); + when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); + when(mockAuthorizer.authorize(any())).thenReturn(result); + + Authentication authentication = + new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); + when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); + when(mockContext.getAuthentication()).thenReturn(authentication); + + OperationContext operationContext = + TestOperationContexts.userContextNoSearchAuthorization(mockAuthorizer, authentication); + when(mockContext.getOperationContext()).thenReturn(operationContext); + return mockContext; } public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequest request) { - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(actorUrn); - - Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationResult result = Mockito.mock(AuthorizationResult.class); - Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); - - 
Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getActorUrn()).thenReturn(actorUrn); + + Authorizer mockAuthorizer = mock(Authorizer.class); + AuthorizationResult result = mock(AuthorizationResult.class); + when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); + when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); + + Authentication authentication = + new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); + when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); + when(mockContext.getAuthentication()).thenReturn(authentication); + + OperationContext operationContext = + TestOperationContexts.userContextNoSearchAuthorization(mockAuthorizer, authentication); + when(mockContext.getOperationContext()).thenReturn(operationContext); + return mockContext; } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + public static void verifyIngestProposal( + EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal)); } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, List proposals) { - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(proposals, mockService.getEntityRegistry()) - .build(); - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(batch), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifyIngestProposal( + EntityService mockService, + int numberOfInvocations, + List proposals) { + + ArgumentCaptor batchCaptor = 
ArgumentCaptor.forClass(AspectsBatchImpl.class); + + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(any(), batchCaptor.capture(), Mockito.eq(false)); + + // check has time + Assert.assertTrue( + batchCaptor.getValue().getItems().stream() + .allMatch(prop -> prop.getSystemMetadata().getLastObserved() > 0L)); + + // check without time + Assert.assertEquals( + batchCaptor.getValue().getItems().stream() + .map(m -> m.getSystemMetadata().setLastObserved(0)) + .collect(Collectors.toList()), + proposals.stream() + .map(m -> m.getSystemMetadata().setLastObserved(0)) + .collect(Collectors.toList())); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifySingleIngestProposal( + EntityService mockService, + int numberOfInvocations, + MetadataChangeProposal expectedProposal) { + ArgumentCaptor proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); + + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(any(), proposalCaptor.capture(), any(AuditStamp.class), Mockito.eq(false)); + + // check has time + Assert.assertTrue(proposalCaptor.getValue().getSystemMetadata().getLastObserved() > 0L); + + // check without time + proposalCaptor.getValue().getSystemMetadata().setLastObserved(0L); + expectedProposal.getSystemMetadata().setLastObserved(0L); + Assert.assertEquals(proposalCaptor.getValue(), expectedProposal); + } + + public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(any(), any(AspectsBatchImpl.class), Mockito.eq(false)); } - public static void verifyIngestProposal(EntityService mockService, int 
numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( + any(), any(MetadataChangeProposal.class), any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifyNoIngestProposal(EntityService mockService) { + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), any(AspectsBatchImpl.class), Mockito.anyBoolean()); } - public static void verifyNoIngestProposal(EntityService mockService) { - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + public static void verifyIngestProposal( + EntityClient mockClient, int numberOfInvocations, MetadataChangeProposal expectedProposal) + throws RemoteInvocationException { + + ArgumentCaptor proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); + + Mockito.verify(mockClient, Mockito.times(numberOfInvocations)) + .ingestProposal(any(), proposalCaptor.capture(), Mockito.eq(false)); + + // check has time + Assert.assertTrue(proposalCaptor.getValue().getSystemMetadata().getLastObserved() > 0L); + + // check without time + proposalCaptor.getValue().getSystemMetadata().setLastObserved(0L); + expectedProposal.getSystemMetadata().setLastObserved(0L); + Assert.assertEquals(proposalCaptor.getValue(), expectedProposal); } - private TestUtils() { } + private TestUtils() {} } diff 
--git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtilsTest.java new file mode 100644 index 00000000000000..073896cbeb0bd6 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtilsTest.java @@ -0,0 +1,33 @@ +package com.linkedin.datahub.graphql.authorization; + +import static org.testng.Assert.assertEquals; + +import com.linkedin.datahub.graphql.generated.ViewProperties; +import org.testng.annotations.Test; + +public class AuthorizationUtilsTest { + + @Test + public void testRestrictedViewProperties() { + // provides a test of primitive boolean + ViewProperties viewProperties = + ViewProperties.builder() + .setMaterialized(true) + .setLanguage("testLang") + .setFormattedLogic("formattedLogic") + .setLogic("testLogic") + .build(); + + String expected = + ViewProperties.builder() + .setMaterialized(true) + .setLanguage("") + .setLogic("") + .build() + .toString(); + + assertEquals( + AuthorizationUtils.restrictEntity(viewProperties, ViewProperties.class).toString(), + expected); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java index 7cd548a4790bae..f98284e92ede58 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static org.mockito.Mockito.mock; +import static org.testng.AssertJUnit.assertEquals; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import 
com.linkedin.common.urn.UrnUtils; @@ -8,6 +12,7 @@ import com.linkedin.datahub.graphql.TestUtils; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -15,66 +20,67 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetchingEnvironment; -import org.testng.annotations.Test; -import org.mockito.Mockito; - import java.util.HashSet; import java.util.Set; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static org.testng.AssertJUnit.assertEquals; - +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ResolverUtilsTest { @Test public void testCriterionFromFilter() throws Exception { - final DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + final DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); final QueryContext mockAllowContext = TestUtils.getMockAllowContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockAllowContext); // this is the expected path - Criterion valuesCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - null, - ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"), - false, - FilterOperator.EQUAL - ) - ); - assertEquals(valuesCriterion, new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword")); + Criterion valuesCriterion = + criterionFromFilter( + new FacetFilterInput( + "tags", + null, + ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"), + false, + 
FilterOperator.EQUAL), + mock(AspectRetriever.class)); + assertEquals( + valuesCriterion, + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); // this is the legacy pathway - Criterion valueCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - "urn:li:tag:abc", - null, - true, - FilterOperator.EQUAL - ) - ); - assertEquals(valueCriterion, new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc")) - ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword")); + Criterion valueCriterion = + criterionFromFilter( + new FacetFilterInput("tags", "urn:li:tag:abc", null, true, FilterOperator.EQUAL), + mock(AspectRetriever.class)); + assertEquals( + valueCriterion, + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc"))) + .setNegated(true) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); - // check that both being null doesn't cause a NPE. this should never happen except via API interaction - Criterion doubleNullCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - null, - null, - true, - FilterOperator.EQUAL - ) - ); - assertEquals(doubleNullCriterion, new Criterion().setValue("").setValues( - new StringArray(ImmutableList.of()) - ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword")); + // check that both being null doesn't cause a NPE. 
this should never happen except via API + // interaction + Criterion doubleNullCriterion = + criterionFromFilter( + new FacetFilterInput("tags", null, null, true, FilterOperator.EQUAL), + mock(AspectRetriever.class)); + assertEquals( + doubleNullCriterion, + new Criterion() + .setValue("") + .setValues(new StringArray(ImmutableList.of())) + .setNegated(true) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); } @Test @@ -85,21 +91,25 @@ public void testBuildFilterWithUrns() throws Exception { urns.add(urn1); urns.add(urn2); - Criterion ownersCriterion = new Criterion() - .setField("owners") - .setValues(new StringArray("urn:li:corpuser:chris")) - .setCondition(Condition.EQUAL); + Criterion ownersCriterion = + new Criterion() + .setField("owners") + .setValues(new StringArray("urn:li:corpuser:chris")) + .setCondition(Condition.EQUAL); CriterionArray andCriterionArray = new CriterionArray(ImmutableList.of(ownersCriterion)); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(andCriterionArray) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(andCriterionArray)))); Filter finalFilter = buildFilterWithUrns(urns, filter); - Criterion urnsCriterion = new Criterion().setField("urn") - .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + Criterion urnsCriterion = + new Criterion() + .setField("urn") + .setValue("") + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); for (ConjunctiveCriterion conjunctiveCriterion : finalFilter.getOr()) { assertEquals(conjunctiveCriterion.getAnd().contains(ownersCriterion), true); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java index c7424174255cec..8102c68e69c874 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java @@ -1,6 +1,12 @@ package com.linkedin.datahub.graphql.resolvers; -import com.linkedin.common.urn.Urn; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.LineageEdge; @@ -9,35 +15,37 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetchingEnvironment; -import org.joda.time.DateTimeUtils; -import org.mockito.Mockito; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.joda.time.DateTimeUtils; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; public class UpdateLineageResolverTest { private static EntityService _mockService = 
Mockito.mock(EntityService.class); private static LineageService _lineageService; private static DataFetchingEnvironment _mockEnv; - private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; - private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; - private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; - private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; + private static final String DATASET_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; + private static final String DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; + private static final String DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; + private static final String DATASET_URN_4 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; - private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; - private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; + private static final String DATAJOB_URN_1 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATAJOB_URN_2 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; @BeforeMethod public void setupTest() { @@ -50,27 +58,30 @@ public void setupTest() { // Adds upstream for dataset1 to dataset2 and removes edge to dataset3 @Test public void testUpdateDatasetLineage() throws Exception { - List edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4)); - List edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, 
DATASET_URN_3)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DATASET_URN_1, DATASET_URN_2), + createLineageEdge(DATASET_URN_3, DATASET_URN_4)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true); + Mockito.when(_mockService.exists(any(), any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(1)); assertTrue(resolver.get(_mockEnv).get()); } @Test public void testFailUpdateWithMissingDownstream() throws Exception { - List edgesToAdd = Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2)); + List edgesToAdd = + Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2)); mockInputAndContext(edgesToAdd, new ArrayList<>()); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(false); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(false); + Mockito.when(_mockService.exists(any(), any(Collection.class), eq(true))) + .thenAnswer(args -> Set.of()); assertThrows(CompletionException.class, () -> resolver.get(_mockEnv).join()); } @@ -83,9 +94,8 @@ public void testUpdateChartLineage() throws Exception { mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true); - 
Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); + Mockito.when(_mockService.exists(any(), any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(1)); assertTrue(resolver.get(_mockEnv).get()); } @@ -93,15 +103,17 @@ public void testUpdateChartLineage() throws Exception { // Adds upstream for dashboard to dataset2 and chart1 and removes edge to dataset1 @Test public void testUpdateDashboardLineage() throws Exception { - List edgesToAdd = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_2), createLineageEdge(DASHBOARD_URN, CHART_URN)); - List edgesToRemove = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DASHBOARD_URN, DATASET_URN_2), + createLineageEdge(DASHBOARD_URN, CHART_URN)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DASHBOARD_URN))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(CHART_URN))).thenReturn(true); + Mockito.when(_mockService.exists(any(), any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(1)); assertTrue(resolver.get(_mockEnv).get()); } @@ -109,49 +121,48 @@ public void testUpdateDashboardLineage() throws Exception { // Adds upstream datajob and dataset and one downstream dataset @Test public void testUpdateDataJobLineage() throws Exception { - List edgesToAdd = Arrays.asList( - createLineageEdge(DATAJOB_URN_1, DATASET_URN_2), - 
createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2), - createLineageEdge(DATASET_URN_3, DATAJOB_URN_1) - ); - List edgesToRemove = Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DATAJOB_URN_1, DATASET_URN_2), + createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2), + createLineageEdge(DATASET_URN_3, DATAJOB_URN_1)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATAJOB_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); + Mockito.when(_mockService.exists(any(), any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(1)); assertTrue(resolver.get(_mockEnv).get()); } @Test public void testFailUpdateLineageNoPermissions() throws Exception { - List edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4)); - List edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DATASET_URN_1, DATASET_URN_2), + createLineageEdge(DATASET_URN_3, DATASET_URN_4)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); QueryContext mockContext = getMockDenyContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); - Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + 
Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(_mockEnv.getContext()).thenReturn(mockContext); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_1))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_2))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_3))).thenReturn(true); - Mockito.when(_mockService.exists(Urn.createFromString(DATASET_URN_4))).thenReturn(true); + Mockito.when(_mockService.exists(any(), any(Collection.class), eq(true))) + .thenAnswer(args -> args.getArgument(1)); assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join()); } - private void mockInputAndContext(List edgesToAdd, List edgesToRemove) { QueryContext mockContext = getMockAllowContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); - Mockito.when(_mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(_mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(_mockEnv.getContext()).thenReturn(mockContext); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java index 6fdb1f2b70ce4b..f6e7e7267a060e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import 
com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionResult; @@ -15,13 +18,12 @@ import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.SystemMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class AssertionRunEventResolverTest { @Test public void testGetSuccess() throws Exception { @@ -29,46 +31,55 @@ public void testGetSuccess() throws Exception { final Urn assertionUrn = Urn.createFromString("urn:li:assertion:guid-1"); final Urn asserteeUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); - final AssertionRunEvent gmsRunEvent = new AssertionRunEvent() - .setTimestampMillis(12L) - .setAssertionUrn(assertionUrn) - .setRunId("test-id") - .setAsserteeUrn(asserteeUrn) - .setStatus(AssertionRunStatus.COMPLETE) - .setResult(new AssertionResult() - .setActualAggValue(10) - .setMissingCount(0L) - .setRowCount(1L) - .setType(AssertionResultType.SUCCESS) - .setUnexpectedCount(2L) - ); + final AssertionRunEvent gmsRunEvent = + new AssertionRunEvent() + .setTimestampMillis(12L) + .setAssertionUrn(assertionUrn) + .setRunId("test-id") + .setAsserteeUrn(asserteeUrn) + .setStatus(AssertionRunStatus.COMPLETE) + .setResult( + new AssertionResult() + .setActualAggValue(10) + .setMissingCount(0L) + .setRowCount(1L) + .setType(AssertionResultType.SUCCESS) + .setUnexpectedCount(2L)); - Mockito.when(mockClient.getTimeseriesAspectValues( - Mockito.eq(assertionUrn.toString()), - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.eq(0L), - Mockito.eq(10L), - Mockito.eq(5), - Mockito.eq(AssertionRunEventResolver.buildFilter(null, AssertionRunStatus.COMPLETE.toString())), - 
Mockito.any(Authentication.class) - )).thenReturn( - ImmutableList.of( - new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent)) - ) - ); + Mockito.when( + mockClient.getTimeseriesAspectValues( + any(), + Mockito.eq(assertionUrn.toString()), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.eq(0L), + Mockito.eq(10L), + Mockito.eq(5), + Mockito.eq( + AssertionRunEventResolver.buildFilter( + null, AssertionRunStatus.COMPLETE.toString(), null)))) + .thenReturn( + ImmutableList.of( + new EnvelopedAspect() + .setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent)) + .setSystemMetadata(new SystemMetadata().setLastObserved(12L)))); AssertionRunEventResolver resolver = new AssertionRunEventResolver(mockClient); // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))).thenReturn("COMPLETE"); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))).thenReturn(0L); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))).thenReturn(10L); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))) + .thenReturn("COMPLETE"); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))) + .thenReturn(0L); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))) + .thenReturn(10L); Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("limit"), Mockito.eq(null))).thenReturn(5); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -78,32 
+89,39 @@ public void testGetSuccess() throws Exception { AssertionRunEventsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).getTimeseriesAspectValues( - Mockito.eq(assertionUrn.toString()), - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.eq(0L), - Mockito.eq(10L), - Mockito.eq(5), - Mockito.any(Filter.class), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .getTimeseriesAspectValues( + any(), + Mockito.eq(assertionUrn.toString()), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.eq(0L), + Mockito.eq(10L), + Mockito.eq(5), + Mockito.any(Filter.class)); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getTotal(), 1); assertEquals(result.getFailed(), 0); assertEquals(result.getSucceeded(), 1); + assertEquals(result.getErrored(), 0); - com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = resolver.get(mockEnv).get().getRunEvents().get(0); + com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = + resolver.get(mockEnv).get().getRunEvents().get(0); assertEquals(graphqlRunEvent.getAssertionUrn(), assertionUrn.toString()); assertEquals(graphqlRunEvent.getAsserteeUrn(), asserteeUrn.toString()); assertEquals(graphqlRunEvent.getRunId(), "test-id"); - assertEquals(graphqlRunEvent.getStatus(), com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE); + assertEquals( + graphqlRunEvent.getStatus(), + com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE); assertEquals((float) graphqlRunEvent.getTimestampMillis(), 12L); + assertEquals((float) graphqlRunEvent.getLastObservedMillis(), 12L); assertEquals((float) graphqlRunEvent.getResult().getActualAggValue(), 10); assertEquals((long) graphqlRunEvent.getResult().getMissingCount(), 0L); assertEquals((long) 
graphqlRunEvent.getResult().getRowCount(), 1L); assertEquals((long) graphqlRunEvent.getResult().getUnexpectedCount(), 2L); - assertEquals(graphqlRunEvent.getResult().getType(), com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS); + assertEquals( + graphqlRunEvent.getResult().getType(), + com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java index 8afec0a8895776..948088175e8e63 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.assertion; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.assertion.AssertionInfo; import com.linkedin.assertion.AssertionStdOperator; @@ -18,10 +22,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteAssertionResolverTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; @@ -31,85 +31,85 @@ public class DeleteAssertionResolverTest { public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion( - new DatasetAssertionInfo() - .setDataset(Urn.createFromString(TEST_DATASET_URN)) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setOperator(AssertionStdOperator.BETWEEN) - ) - ); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L))) + .thenReturn( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(Urn.createFromString(TEST_DATASET_URN)) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setOperator(AssertionStdOperator.BETWEEN))); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(any(), eq(Urn.createFromString(TEST_ASSERTION_URN))); - Mockito.verify(mockService, Mockito.times(1)).getAspect( - 
Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getAspect( + any(), + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test public void testGetSuccessNoAssertionInfoFound() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn(null); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L))) + .thenReturn(null); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); + 
Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(any(), eq(Urn.createFromString(TEST_ASSERTION_URN))); - Mockito.verify(mockService, Mockito.times(1)).getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getAspect( + any(), + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test @@ -117,89 +117,87 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(false); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(false); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); - - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - 
Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), - Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(any(), eq(Urn.createFromString(TEST_ASSERTION_URN))); + + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + any(), + eq(Constants.ASSERTION_ENTITY_NAME), + eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), + eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true)); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion( - new DatasetAssertionInfo() - .setDataset(Urn.createFromString(TEST_DATASET_URN)) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setOperator(AssertionStdOperator.BETWEEN) - ) - ); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(TEST_ASSERTION_URN)), + eq(Constants.ASSERTION_INFO_ASPECT_NAME), + eq(0L))) + .thenReturn( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(Urn.createFromString(TEST_DATASET_URN)) + 
.setScope(DatasetAssertionScope.DATASET_COLUMN) + .setOperator(AssertionStdOperator.BETWEEN))); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), Mockito.any()); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(any(), Mockito.any()); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ASSERTION_URN)), eq(true))) + .thenReturn(true); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_ASSERTION_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java index c5b5725f23b7ae..0d236440e75c60 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -37,9 +40,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityAssertionsResolverTest { @Test public void testGetSuccess() throws Exception { @@ -49,73 +49,79 @@ public void testGetSuccess() throws Exception { Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); Urn assertionUrn = Urn.createFromString("urn:li:assertion:test-guid"); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(datasetUrn.toString()), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(assertionUrn) - .setType("Asserts")) - )) - ); - + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(datasetUrn.toString()), + Mockito.eq(ImmutableList.of("Asserts")), + 
Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship().setEntity(assertionUrn).setType("Asserts"))))); Map assertionAspects = new HashMap<>(); assertionAspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionKey().setAssertionId("test-guid").data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new AssertionKey().setAssertionId("test-guid").data()))); assertionAspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(new DatasetAssertionInfo() - .setDataset(datasetUrn) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setAggregation(AssertionStdAggregation.MAX) - .setOperator(AssertionStdOperator.EQUAL_TO) - .setFields(new UrnArray(ImmutableList.of( - Urn.createFromString("urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)") - ))) - .setParameters(new AssertionStdParameters().setValue(new AssertionStdParameter() - .setValue("10") - .setType( - AssertionStdParameterType.NUMBER))) - ).data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(datasetUrn) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setAggregation(AssertionStdAggregation.MAX) + .setOperator(AssertionStdOperator.EQUAL_TO) + .setFields( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)")))) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + 
.setValue("10") + .setType(AssertionStdParameterType.NUMBER)))) + .data()))); assertionAspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) - .data() - )) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn)), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(ImmutableMap.of( - assertionUrn, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn) - .setAspects(new EnvelopedAspectMap(assertionAspects)))); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) + .data()))); + + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn)), + Mockito.eq(null))) + .thenReturn( + ImmutableMap.of( + assertionUrn, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn) + .setAspects(new EnvelopedAspectMap(assertionAspects)))); + + Mockito.when(mockClient.exists(any(), Mockito.any(Urn.class), Mockito.eq(false))) + .thenReturn(true); EntityAssertionsResolver resolver = new EntityAssertionsResolver(mockClient, graphClient); @@ -126,6 +132,8 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0); Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("count"), Mockito.eq(200))).thenReturn(10); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("includeSoftDeleted"), Mockito.eq(false))) + .thenReturn(false); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Dataset parentEntity = new Dataset(); @@ -134,38 +142,48 @@ public void testGetSuccess() 
throws Exception { EntityAssertionsResult result = resolver.get(mockEnv).get(); - Mockito.verify(graphClient, Mockito.times(1)).getRelatedEntities( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(graphClient, Mockito.times(1)) + .getRelatedEntities( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); + + Mockito.verify(mockClient, Mockito.times(1)) + .exists(Mockito.any(), Mockito.any(), Mockito.any()); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getStart(), 0); assertEquals(result.getCount(), 1); assertEquals(result.getTotal(), 1); - com.linkedin.datahub.graphql.generated.Assertion assertion = resolver.get(mockEnv).get().getAssertions().get(0); + com.linkedin.datahub.graphql.generated.Assertion assertion = + resolver.get(mockEnv).get().getAssertions().get(0); assertEquals(assertion.getUrn(), assertionUrn.toString()); assertEquals(assertion.getType(), EntityType.ASSERTION); assertEquals(assertion.getPlatform().getUrn(), "urn:li:dataPlatform:hive"); - assertEquals(assertion.getInfo().getType(), com.linkedin.datahub.graphql.generated.AssertionType.DATASET); + assertEquals( + assertion.getInfo().getType(), + com.linkedin.datahub.graphql.generated.AssertionType.DATASET); assertEquals(assertion.getInfo().getDatasetAssertion().getDatasetUrn(), datasetUrn.toString()); - assertEquals(assertion.getInfo().getDatasetAssertion().getScope(), com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); - assertEquals(assertion.getInfo().getDatasetAssertion().getAggregation(), com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); - 
assertEquals(assertion.getInfo().getDatasetAssertion().getOperator(), com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), + assertEquals( + assertion.getInfo().getDatasetAssertion().getScope(), + com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); + assertEquals( + assertion.getInfo().getDatasetAssertion().getAggregation(), + com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); + assertEquals( + assertion.getInfo().getDatasetAssertion().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/ReportAssertionResultResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/ReportAssertionResultResolverTest.java new file mode 100644 index 00000000000000..cf3c833cbba232 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/ReportAssertionResultResolverTest.java @@ -0,0 +1,160 @@ +package com.linkedin.datahub.graphql.resolvers.assertion; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.assertion.AssertionResult; +import com.linkedin.assertion.AssertionResultError; +import com.linkedin.assertion.AssertionRunEvent; +import 
com.linkedin.assertion.AssertionRunStatus; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AssertionResultErrorInput; +import com.linkedin.datahub.graphql.generated.AssertionResultErrorType; +import com.linkedin.datahub.graphql.generated.AssertionResultInput; +import com.linkedin.datahub.graphql.generated.AssertionResultType; +import com.linkedin.datahub.graphql.generated.StringMapEntryInput; +import com.linkedin.metadata.service.AssertionService; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Map; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class ReportAssertionResultResolverTest { + + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"); + + private static final Urn TEST_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:test"); + + private static final String customAssertionUrl = "https://dq-platform-native-url"; + + private static final AssertionResultInput TEST_INPUT = + new AssertionResultInput( + 0L, + AssertionResultType.ERROR, + ImmutableList.of(new StringMapEntryInput("prop1", "value1")), + customAssertionUrl, + new AssertionResultErrorInput( + AssertionResultErrorType.UNKNOWN_ERROR, "an unknown error occurred")); + + ; + + private static final AssertionRunEvent TEST_ASSERTION_RUN_EVENT = + new AssertionRunEvent() + .setAssertionUrn(TEST_ASSERTION_URN) + .setAsserteeUrn(TEST_DATASET_URN) + .setTimestampMillis(0L) + .setRunId("0") + .setStatus(AssertionRunStatus.COMPLETE) + .setResult( + new AssertionResult() + .setType(com.linkedin.assertion.AssertionResultType.ERROR) + .setError( + new AssertionResultError() + 
.setType(com.linkedin.assertion.AssertionResultErrorType.UNKNOWN_ERROR) + .setProperties( + new StringMap(Map.of("message", "an unknown error occurred")))) + .setExternalUrl(customAssertionUrl) + .setNativeResults(new StringMap(Map.of("prop1", "value1")))); + + @Test + public void testGetSuccessReportAssertionRunEvent() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + ReportAssertionResultResolver resolver = new ReportAssertionResultResolver(mockedService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("result"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when( + mockedService.getEntityUrnForAssertion( + any(OperationContext.class), Mockito.eq(TEST_ASSERTION_URN))) + .thenReturn(TEST_DATASET_URN); + + resolver.get(mockEnv).get(); + + // Validate that we created the assertion + Mockito.verify(mockedService, Mockito.times(1)) + .addAssertionRunEvent( + any(OperationContext.class), + Mockito.eq(TEST_ASSERTION_URN), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(TEST_ASSERTION_RUN_EVENT.getTimestampMillis()), + Mockito.eq(TEST_ASSERTION_RUN_EVENT.getResult())); + } + + @Test + public void testGetUpdateAssertionUnauthorized() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + ReportAssertionResultResolver resolver = new ReportAssertionResultResolver(mockedService); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + 
Mockito.when(mockEnv.getArgument(Mockito.eq("result"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when( + mockedService.getEntityUrnForAssertion( + any(OperationContext.class), Mockito.eq(TEST_ASSERTION_URN))) + .thenReturn(TEST_DATASET_URN); + + CompletionException e = + expectThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + assert e.getMessage() + .contains( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + + // Validate that we created the assertion + Mockito.verify(mockedService, Mockito.times(0)) + .addAssertionRunEvent( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + } + + @Test + public void testGetAssertionServiceException() { + // Update resolver + AssertionService mockService = Mockito.mock(AssertionService.class); + + Mockito.when( + mockService.getEntityUrnForAssertion( + any(OperationContext.class), Mockito.eq(TEST_ASSERTION_URN))) + .thenReturn(TEST_DATASET_URN); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .addAssertionRunEvent( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + + ReportAssertionResultResolver resolver = new ReportAssertionResultResolver(mockService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("result"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/UpsertCustomAssertionResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/UpsertCustomAssertionResolverTest.java new file mode 100644 index 00000000000000..2ac6335ba9fea4 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/UpsertCustomAssertionResolverTest.java @@ -0,0 +1,345 @@ +package com.linkedin.datahub.graphql.resolvers.assertion; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.assertion.AssertionInfo; +import com.linkedin.assertion.AssertionSource; +import com.linkedin.assertion.AssertionSourceType; +import com.linkedin.assertion.AssertionType; +import com.linkedin.assertion.CustomAssertionInfo; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.url.Url; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Assertion; +import com.linkedin.datahub.graphql.generated.PlatformInput; +import com.linkedin.datahub.graphql.generated.UpsertCustomAssertionInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.service.AssertionService; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UpsertCustomAssertionResolverTest { + + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"); + + private static final String 
TEST_INVALID_DATASET_URN = "dataset.name"; + + private static final Urn TEST_FIELD_URN = + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD),field1)"); + private static final Urn TEST_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:test"); + + private static final String TEST_INVALID_ASSERTION_URN = "test"; + private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:actor:test"); + + private static final Urn TEST_PLATFORM_URN = UrnUtils.getUrn("urn:li:dataPlatform:DQplatform"); + + private static final String customAssertionType = "My custom category"; + private static final String customAssertionDescription = "Description of custom assertion"; + private static final String customAssertionUrl = "https://dq-platform-native-url"; + + private static final String customAssertionLogic = "custom script of assertion"; + + private static final UpsertCustomAssertionInput TEST_INPUT = + new UpsertCustomAssertionInput( + TEST_DATASET_URN.toString(), + customAssertionType, + customAssertionDescription, + "field1", + new PlatformInput(null, "DQplatform"), + customAssertionUrl, + customAssertionLogic); + + private static final UpsertCustomAssertionInput TEST_INPUT_MISSING_PLATFORM = + new UpsertCustomAssertionInput( + TEST_DATASET_URN.toString(), + customAssertionType, + customAssertionDescription, + "field1", + new PlatformInput(null, null), + customAssertionUrl, + customAssertionLogic); + + private static final UpsertCustomAssertionInput TEST_INPUT_INVALID_ENTITY_URN = + new UpsertCustomAssertionInput( + TEST_INVALID_DATASET_URN, + customAssertionType, + customAssertionDescription, + "field1", + new PlatformInput(null, "DQplatform"), + customAssertionUrl, + customAssertionLogic); + + private static final AssertionInfo TEST_ASSERTION_INFO = + new AssertionInfo() + .setType(AssertionType.CUSTOM) + .setDescription(customAssertionDescription) + .setExternalUrl(new Url(customAssertionUrl)) + .setSource( + new AssertionSource() + 
.setType(AssertionSourceType.EXTERNAL) + .setCreated( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(TEST_ACTOR_URN))) + .setCustomAssertion( + new CustomAssertionInfo() + .setEntity(TEST_DATASET_URN) + .setType(customAssertionType) + .setField(TEST_FIELD_URN) + .setLogic(customAssertionLogic)); + + private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(TEST_PLATFORM_URN); + + @Test + public void testGetSuccessCreateAssertion() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockedService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(null); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when(mockedService.generateAssertionUrn()).thenReturn(TEST_ASSERTION_URN); + Mockito.when( + mockedService.getAssertionEntityResponse( + any(OperationContext.class), Mockito.eq(TEST_ASSERTION_URN))) + .thenReturn( + new EntityResponse() + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.ASSERTION_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())), + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))))) + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(TEST_ASSERTION_URN)); + + Assertion assertion = resolver.get(mockEnv).get(); + + // Don't validate each field since we have mapper tests already. 
+ assertNotNull(assertion); + assertEquals(assertion.getUrn(), TEST_ASSERTION_URN.toString()); + + // Validate that we created the assertion + Mockito.verify(mockedService, Mockito.times(1)) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.eq(TEST_ASSERTION_URN), + Mockito.eq(TEST_ASSERTION_INFO.getCustomAssertion().getEntity()), + Mockito.eq(TEST_ASSERTION_INFO.getDescription()), + Mockito.eq(TEST_ASSERTION_INFO.getExternalUrl().toString()), + Mockito.eq(TEST_DATA_PLATFORM_INSTANCE), + Mockito.eq(TEST_ASSERTION_INFO.getCustomAssertion())); + } + + @Test + public void testGetSuccessUpdateAssertion() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockedService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when( + mockedService.getAssertionEntityResponse( + any(OperationContext.class), Mockito.eq(TEST_ASSERTION_URN))) + .thenReturn( + new EntityResponse() + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.ASSERTION_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())), + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))))) + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(TEST_ASSERTION_URN)); + + Assertion assertion = resolver.get(mockEnv).get(); + + // Don't validate each field since we have mapper tests already. 
+ assertNotNull(assertion); + assertEquals(assertion.getUrn(), TEST_ASSERTION_URN.toString()); + + // Validate that we created the assertion + Mockito.verify(mockedService, Mockito.times(1)) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.eq(TEST_ASSERTION_URN), + Mockito.eq(TEST_ASSERTION_INFO.getCustomAssertion().getEntity()), + Mockito.eq(TEST_ASSERTION_INFO.getDescription()), + Mockito.eq(TEST_ASSERTION_INFO.getExternalUrl().toString()), + Mockito.eq(TEST_DATA_PLATFORM_INSTANCE), + Mockito.eq(TEST_ASSERTION_INFO.getCustomAssertion())); + } + + @Test + public void testGetUpdateAssertionUnauthorized() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockedService); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + CompletionException e = + expectThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + assert e.getMessage() + .contains( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + + Mockito.verify(mockedService, Mockito.times(0)) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + } + + @Test + public void testGetUpsertAssertionMissingPlatformFailure() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockedService); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_MISSING_PLATFORM); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + CompletionException e = + expectThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + assert e.getMessage() + .contains( + "Failed to upsert Custom Assertion. 
Platform Name or Platform Urn must be specified."); + + Mockito.verify(mockedService, Mockito.times(0)) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + } + + @Test + public void testGetUpsertAssertionInvalidAssertionUrn() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockedService); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INVALID_ASSERTION_URN); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + RuntimeException e = expectThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); + assert e.getMessage().contains("invalid urn"); + + Mockito.verify(mockedService, Mockito.times(0)) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + } + + @Test + public void testGetUpsertAssertionInvalidEntityUrn() throws Exception { + // Update resolver + AssertionService mockedService = Mockito.mock(AssertionService.class); + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockedService); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))) + .thenReturn(TEST_INPUT_INVALID_ENTITY_URN); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + 
RuntimeException e = expectThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); + assert e.getMessage().contains("invalid urn"); + + Mockito.verify(mockedService, Mockito.times(0)) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + } + + @Test + public void testGetAssertionServiceException() { + // Update resolver + AssertionService mockService = Mockito.mock(AssertionService.class); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .upsertCustomAssertion( + any(OperationContext.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + + UpsertCustomAssertionResolver resolver = new UpsertCustomAssertionResolver(mockService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_ASSERTION_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 52d06f73dcfab9..020f74475ea607 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static 
org.mockito.ArgumentMatchers.any; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; @@ -9,18 +12,14 @@ import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; -import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; import java.util.Collections; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class ListAccessTokensResolverTest { @Test @@ -42,16 +41,21 @@ public void testGetSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); final Authentication testAuth = getAuthentication(mockEnv); - Mockito.when(mockClient.search( - Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(buildFilter(filters, Collections.emptyList())), - Mockito.any(SortCriterion.class), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.eq(testAuth), - Mockito.any(SearchFlags.class))) - .thenReturn(new SearchResult().setFrom(0).setNumEntities(0).setPageSize(0).setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(buildFilter(filters, Collections.emptyList(), null)), + Mockito.any(List.class), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setNumEntities(0) + .setPageSize(0) + .setEntities(new SearchEntityArray())); final ListAccessTokensResolver resolver = new ListAccessTokensResolver(mockClient); final 
ListAccessTokenResult listAccessTokenResult = resolver.get(mockEnv).get(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 4a948537ab4fed..9cf7e62e65e253 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -15,6 +18,7 @@ import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.chart.BrowseV2Resolver; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.browse.BrowseResultGroupV2; import com.linkedin.metadata.browse.BrowseResultGroupV2Array; import com.linkedin.metadata.browse.BrowseResultMetadata; @@ -24,20 +28,18 @@ import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.List; import org.mockito.Mockito; import org.testng.Assert; import 
org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.List; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class BrowseV2ResolverTest { private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @@ -45,26 +47,35 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - null, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); - - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + null, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); + + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -81,6 +92,7 @@ public static void testBrowseV2Success() throws Exception { @Test public static void testBrowseV2SuccessWithQueryAndFilter() 
throws Exception { + FormService mockFormService = Mockito.mock(FormService.class); ViewService mockService = Mockito.mock(ViewService.class); List orFilters = new ArrayList<>(); @@ -90,27 +102,35 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { facetFilterInput.setValues(ImmutableList.of("urn:li:corpuser:test")); andFilterInput.setAnd(ImmutableList.of(facetFilterInput)); orFilters.add(andFilterInput); - Filter filter = ResolverUtils.buildFilter(null, orFilters); - - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "test", - filter, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); - - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); + Filter filter = ResolverUtils.buildFilter(null, orFilters, mock(AspectRetriever.class)); + + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "test", + filter, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); + + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, mockService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -130,27 +150,36 @@ public static void 
testBrowseV2SuccessWithQueryAndFilter() throws Exception { @Test public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); + FormService mockFormService = Mockito.mock(FormService.class); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - viewInfo.getDefinition().getFilter(), - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); - - final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + viewInfo.getDefinition().getFilter(), + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); + + final BrowseV2Resolver resolver = + new BrowseV2Resolver(mockClient, viewService, mockFormService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -166,16 +195,25 @@ public static void testBrowseV2SuccessWithView() throws Exception { compareResultToExpectedData(result, getExpectedResult()); } - private static void compareResultToExpectedData(BrowseResultsV2 result, BrowseResultsV2 expected) { + private static void 
compareResultToExpectedData( + BrowseResultsV2 result, BrowseResultsV2 expected) { Assert.assertEquals(result.getCount(), expected.getCount()); Assert.assertEquals(result.getStart(), expected.getStart()); Assert.assertEquals(result.getTotal(), expected.getTotal()); Assert.assertEquals(result.getGroups().size(), expected.getGroups().size()); - result.getGroups().forEach(group -> { - Assert.assertTrue(expected.getGroups().stream().filter(g -> g.getName().equals(group.getName())).count() > 0); - }); + result + .getGroups() + .forEach( + group -> { + Assert.assertTrue( + expected.getGroups().stream() + .filter(g -> g.getName().equals(group.getName())) + .count() + > 0); + }); Assert.assertEquals(result.getMetadata().getPath(), expected.getMetadata().getPath()); - Assert.assertEquals(result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); + Assert.assertEquals( + result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); } private static BrowseResultsV2 getExpectedResult() { @@ -185,19 +223,22 @@ private static BrowseResultsV2 getExpectedResult() { results.setCount(10); List groups = new ArrayList<>(); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup1.setName("first group"); browseGroup1.setCount(5L); browseGroup1.setHasSubGroups(true); groups.add(browseGroup1); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup2.setName("second group"); browseGroup2.setCount(4L); browseGroup2.setHasSubGroups(false); 
groups.add(browseGroup2); results.setGroups(groups); - com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); + com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = + new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); resultMetadata.setPath(ImmutableList.of("test", "path")); resultMetadata.setTotalNumEntities(100L); results.setMetadata(resultMetadata); @@ -212,60 +253,51 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - BrowseResultV2 result - ) throws Exception { + BrowseResultV2 result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.browseV2( - Mockito.eq(entityName), - Mockito.eq(path), - Mockito.eq(filter), - Mockito.eq(query), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.browseV2( + Mockito.any(), + Mockito.eq(ImmutableList.of(entityName)), + Mockito.eq(path), + Mockito.eq(filter), + Mockito.eq(query), + Mockito.eq(start), + Mockito.eq(limit))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(any(), Mockito.eq(viewUrn))).thenReturn(viewInfo); return service; } + private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new 
StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } - private BrowseV2ResolverTest() { } - + private BrowseV2ResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java index 659e6aea740ec1..75abf1d48a15c8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -16,12 +18,10 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityBrowsePathsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + 
"urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { @@ -30,9 +30,7 @@ public void testGetSuccess() throws Exception { List path = ImmutableList.of("prod", "mysql"); Mockito.when(mockType.browsePaths(Mockito.eq(TEST_ENTITY_URN), Mockito.any())) - .thenReturn(ImmutableList.of( - new BrowsePath(path)) - ); + .thenReturn(ImmutableList.of(new BrowsePath(path))); // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -55,9 +53,9 @@ public void testGetSuccess() throws Exception { @Test public void testGetBrowsePathsException() throws Exception { BrowsableEntityType mockType = Mockito.mock(BrowsableEntityType.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockType).browsePaths( - Mockito.any(), - Mockito.any()); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockType) + .browsePaths(Mockito.any(), Mockito.any()); EntityBrowsePathsResolver resolver = new EntityBrowsePathsResolver(mockType); @@ -75,4 +73,4 @@ public void testGetBrowsePathsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/AddBusinessAttributeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/AddBusinessAttributeResolverTest.java new file mode 100644 index 00000000000000..280adcf896d5e9 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/AddBusinessAttributeResolverTest.java @@ -0,0 +1,153 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.expectThrows; + +import com.google.common.collect.ImmutableList; +import com.linkedin.businessattribute.BusinessAttributeAssociation; +import com.linkedin.businessattribute.BusinessAttributes; +import com.linkedin.common.urn.BusinessAttributeUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AddBusinessAttributeInput; +import com.linkedin.datahub.graphql.generated.ResourceRefInput; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.net.URISyntaxException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class AddBusinessAttributeResolverTest { + private static final String BUSINESS_ATTRIBUTE_URN = + "urn:li:businessAttribute:7d0c4283-de02-4043-aaf2-698b04274658"; + private static final String RESOURCE_URN = + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_bar)"; + private EntityService mockService; + private QueryContext mockContext; + private DataFetchingEnvironment mockEnv; + + private void init() { + mockService = getMockEntityService(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + } + + private void setupAllowContext() { + mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + @Test + public void testSuccess() throws Exception { + init(); + setupAllowContext(); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(addBusinessAttributeInput()); + Mockito.when( + mockService.exists( + any(OperationContext.class), + eq(Urn.createFromString((BUSINESS_ATTRIBUTE_URN))), + eq(true))) + 
.thenReturn(true); + + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(Urn.createFromString(RESOURCE_URN)), + eq(Constants.BUSINESS_ATTRIBUTE_ASPECT), + eq(0L))) + .thenReturn(new BusinessAttributes()); + + AddBusinessAttributeResolver addBusinessAttributeResolver = + new AddBusinessAttributeResolver(mockService); + addBusinessAttributeResolver.get(mockEnv).get(); + + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal(any(OperationContext.class), any(AspectsBatchImpl.class), eq(false)); + } + + @Test + public void testBusinessAttributeAlreadyAdded() throws Exception { + init(); + setupAllowContext(); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(addBusinessAttributeInput()); + Mockito.when( + mockService.exists( + any(OperationContext.class), + eq(Urn.createFromString((BUSINESS_ATTRIBUTE_URN))), + eq(true))) + .thenReturn(true); + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(Urn.createFromString(RESOURCE_URN)), + eq(Constants.BUSINESS_ATTRIBUTE_ASPECT), + eq(0L))) + .thenReturn(businessAttributes()); + + AddBusinessAttributeResolver addBusinessAttributeResolver = + new AddBusinessAttributeResolver(mockService); + addBusinessAttributeResolver.get(mockEnv).get(); + + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal(any(OperationContext.class), any(AspectsBatchImpl.class), eq(false)); + } + + @Test + public void testBusinessAttributeNotExists() throws Exception { + init(); + setupAllowContext(); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(addBusinessAttributeInput()); + Mockito.when( + mockService.exists( + any(OperationContext.class), + eq(Urn.createFromString((BUSINESS_ATTRIBUTE_URN))), + eq(true))) + .thenReturn(false); + Mockito.when( + mockService.exists( + any(OperationContext.class), eq(Urn.createFromString(RESOURCE_URN)), eq(true))) + .thenReturn(true); + + AddBusinessAttributeResolver addBusinessAttributeResolver = + new 
AddBusinessAttributeResolver(mockService); + RuntimeException exception = + expectThrows(RuntimeException.class, () -> addBusinessAttributeResolver.get(mockEnv).get()); + assertTrue( + exception + .getMessage() + .equals(String.format("This urn does not exist: %s", BUSINESS_ATTRIBUTE_URN))); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), any(AspectsBatchImpl.class), eq(false)); + } + + public AddBusinessAttributeInput addBusinessAttributeInput() { + AddBusinessAttributeInput addBusinessAttributeInput = new AddBusinessAttributeInput(); + addBusinessAttributeInput.setBusinessAttributeUrn(BUSINESS_ATTRIBUTE_URN); + addBusinessAttributeInput.setResourceUrn(resourceRefInput()); + return addBusinessAttributeInput; + } + + private ImmutableList resourceRefInput() { + ResourceRefInput resourceRefInput = new ResourceRefInput(); + resourceRefInput.setResourceUrn(RESOURCE_URN); + return ImmutableList.of(resourceRefInput); + } + + private BusinessAttributes businessAttributes() throws URISyntaxException { + BusinessAttributes businessAttributes = new BusinessAttributes(); + BusinessAttributeAssociation businessAttributeAssociation = new BusinessAttributeAssociation(); + businessAttributeAssociation.setBusinessAttributeUrn( + BusinessAttributeUrn.createFromString(BUSINESS_ATTRIBUTE_URN)); + businessAttributes.setBusinessAttribute(businessAttributeAssociation); + return businessAttributes; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeProposalMatcher.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeProposalMatcher.java new file mode 100644 index 00000000000000..abed58aa883760 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeProposalMatcher.java @@ -0,0 +1,37 @@ +package 
com.linkedin.datahub.graphql.resolvers.businessattribute; + +import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.GenericAspect; +import com.linkedin.mxe.MetadataChangeProposal; +import org.mockito.ArgumentMatcher; + +public class CreateBusinessAttributeProposalMatcher + implements ArgumentMatcher { + private MetadataChangeProposal left; + + public CreateBusinessAttributeProposalMatcher(MetadataChangeProposal left) { + this.left = left; + } + + @Override + public boolean matches(MetadataChangeProposal right) { + return left.getEntityType().equals(right.getEntityType()) + && left.getAspectName().equals(right.getAspectName()) + && left.getChangeType().equals(right.getChangeType()) + && businessAttributeInfoMatch(left.getAspect(), right.getAspect()); + } + + private boolean businessAttributeInfoMatch(GenericAspect left, GenericAspect right) { + BusinessAttributeInfo leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", BusinessAttributeInfo.class); + + BusinessAttributeInfo rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", BusinessAttributeInfo.class); + + return leftProps.getName().equals(rightProps.getName()) + && leftProps.getDescription().equals(rightProps.getDescription()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeResolverTest.java new file mode 100644 index 00000000000000..2623a6b25811ad --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/CreateBusinessAttributeResolverTest.java @@ -0,0 +1,248 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static 
com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.expectThrows; + +import com.datahub.authentication.Authentication; +import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.businessattribute.BusinessAttributeKey; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.CreateBusinessAttributeInput; +import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.BusinessAttributeUtils; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.service.BusinessAttributeService; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.schema.BooleanType; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.ExecutionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class CreateBusinessAttributeResolverTest { + 
+ private static final String BUSINESS_ATTRIBUTE_URN = + "urn:li:businessAttribute:business-attribute-1"; + private static final String TEST_BUSINESS_ATTRIBUTE_NAME = "test-business-attribute"; + private static final String TEST_BUSINESS_ATTRIBUTE_DESCRIPTION = "test-description"; + private static final CreateBusinessAttributeInput TEST_INPUT = + new CreateBusinessAttributeInput( + BUSINESS_ATTRIBUTE_URN, + TEST_BUSINESS_ATTRIBUTE_NAME, + TEST_BUSINESS_ATTRIBUTE_DESCRIPTION, + SchemaFieldDataType.BOOLEAN); + private static final CreateBusinessAttributeInput TEST_INPUT_NULL_NAME = + new CreateBusinessAttributeInput( + BUSINESS_ATTRIBUTE_URN, + null, + TEST_BUSINESS_ATTRIBUTE_DESCRIPTION, + SchemaFieldDataType.BOOLEAN); + private EntityClient mockClient; + private EntityService mockService; + private QueryContext mockContext; + private DataFetchingEnvironment mockEnv; + private BusinessAttributeService businessAttributeService; + private Authentication mockAuthentication; + private SearchResult searchResult; + + private void init() { + mockClient = Mockito.mock(EntityClient.class); + mockService = getMockEntityService(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + businessAttributeService = Mockito.mock(BusinessAttributeService.class); + mockAuthentication = Mockito.mock(Authentication.class); + searchResult = Mockito.mock(SearchResult.class); + } + + @Test + public void testSuccess() throws Exception { + // Mock + init(); + setupAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockClient.exists(any(OperationContext.class), Mockito.any(Urn.class))) + .thenReturn(false); + Mockito.when( + mockClient.filter( + Mockito.any(OperationContext.class), + Mockito.any(String.class), + Mockito.any(Filter.class), + Mockito.isNull(), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(searchResult); + Mockito.when(searchResult.getNumEntities()).thenReturn(0); + Mockito.when( + mockClient.ingestProposal( 
+ any(OperationContext.class), Mockito.any(MetadataChangeProposal.class))) + .thenReturn(BUSINESS_ATTRIBUTE_URN); + Mockito.when( + businessAttributeService.getBusinessAttributeEntityResponse( + any(OperationContext.class), Mockito.any(Urn.class))) + .thenReturn(getBusinessAttributeEntityResponse()); + + // Execute + CreateBusinessAttributeResolver resolver = + new CreateBusinessAttributeResolver(mockClient, mockService, businessAttributeService); + resolver.get(mockEnv).get(); + + // verify + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + any(OperationContext.class), + Mockito.argThat(new CreateBusinessAttributeProposalMatcher(metadataChangeProposal()))); + } + + @Test + public void testNameIsNull() throws Exception { + // Mock + init(); + setupAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_NULL_NAME); + Mockito.when(mockClient.exists(any(OperationContext.class), Mockito.any(Urn.class))) + .thenReturn(false); + + // Execute + CreateBusinessAttributeResolver resolver = + new CreateBusinessAttributeResolver(mockClient, mockService, businessAttributeService); + ExecutionException actualException = + expectThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + + // verify + assertTrue( + actualException + .getCause() + .getMessage() + .equals("Failed to create Business Attribute with name: null")); + + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + @Test + public void testNameAlreadyExists() throws Exception { + // Mock + init(); + setupAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockClient.exists(any(OperationContext.class), Mockito.any(Urn.class))) + .thenReturn(false); + Mockito.when( + mockClient.filter( + Mockito.any(OperationContext.class), + Mockito.any(String.class), + Mockito.any(Filter.class), + Mockito.isNull(), + 
Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(searchResult); + Mockito.when(searchResult.getNumEntities()).thenReturn(1); + + // Execute + CreateBusinessAttributeResolver resolver = + new CreateBusinessAttributeResolver(mockClient, mockService, businessAttributeService); + ExecutionException exception = + expectThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + + // Verify + assertTrue( + exception + .getCause() + .getMessage() + .equals( + "\"test-business-attribute\" already exists as Business Attribute. Please pick a unique name.")); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + @Test + public void testUnauthorized() throws Exception { + init(); + setupDenyContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + + CreateBusinessAttributeResolver resolver = + new CreateBusinessAttributeResolver(mockClient, mockService, businessAttributeService); + AuthorizationException exception = + expectThrows(AuthorizationException.class, () -> resolver.get(mockEnv)); + + assertTrue( + exception + .getMessage() + .equals( + "Unauthorized to perform this action. 
Please contact your DataHub administrator.")); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + private EntityResponse getBusinessAttributeEntityResponse() throws Exception { + EnvelopedAspectMap map = new EnvelopedAspectMap(); + BusinessAttributeInfo businessAttributeInfo = businessAttributeInfo(); + map.put( + BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(businessAttributeInfo.data()))); + EntityResponse entityResponse = new EntityResponse(); + entityResponse.setAspects(map); + entityResponse.setUrn(Urn.createFromString(BUSINESS_ATTRIBUTE_URN)); + return entityResponse; + } + + private MetadataChangeProposal metadataChangeProposal() { + BusinessAttributeKey businessAttributeKey = new BusinessAttributeKey(); + BusinessAttributeInfo info = new BusinessAttributeInfo(); + info.setFieldPath(TEST_BUSINESS_ATTRIBUTE_NAME); + info.setName(TEST_BUSINESS_ATTRIBUTE_NAME); + info.setDescription(TEST_BUSINESS_ATTRIBUTE_DESCRIPTION); + info.setType( + BusinessAttributeUtils.mapSchemaFieldDataType(SchemaFieldDataType.BOOLEAN), + SetMode.IGNORE_NULL); + return MutationUtils.buildMetadataChangeProposalWithKey( + businessAttributeKey, + BUSINESS_ATTRIBUTE_ENTITY_NAME, + BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + info); + } + + private void setupAllowContext() { + mockContext = getMockAllowContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + private void setupDenyContext() { + mockContext = getMockDenyContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + private BusinessAttributeInfo businessAttributeInfo() { + BusinessAttributeInfo businessAttributeInfo = new BusinessAttributeInfo(); + 
businessAttributeInfo.setName(TEST_BUSINESS_ATTRIBUTE_NAME); + businessAttributeInfo.setFieldPath(TEST_BUSINESS_ATTRIBUTE_NAME); + businessAttributeInfo.setDescription(TEST_BUSINESS_ATTRIBUTE_DESCRIPTION); + com.linkedin.schema.SchemaFieldDataType schemaFieldDataType = + new com.linkedin.schema.SchemaFieldDataType(); + schemaFieldDataType.setType( + com.linkedin.schema.SchemaFieldDataType.Type.create(new BooleanType())); + businessAttributeInfo.setType(schemaFieldDataType); + return businessAttributeInfo; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/DeleteBusinessAttributeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/DeleteBusinessAttributeResolverTest.java new file mode 100644 index 00000000000000..3e37aa5cd408d8 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/DeleteBusinessAttributeResolverTest.java @@ -0,0 +1,109 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.expectThrows; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.entity.client.EntityClient; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class DeleteBusinessAttributeResolverTest { + private static final String TEST_BUSINESS_ATTRIBUTE_URN = + 
"urn:li:businessAttribute:7d0c4283-de02-4043-aaf2-698b04274658"; + private EntityClient mockClient; + private QueryContext mockContext; + private DataFetchingEnvironment mockEnv; + private Authentication mockAuthentication; + + private void init() { + mockClient = Mockito.mock(EntityClient.class); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + mockAuthentication = Mockito.mock(Authentication.class); + } + + private void setupAllowContext() { + mockContext = getMockAllowContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + private void setupDenyContext() { + mockContext = getMockDenyContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + @Test + public void testSuccess() throws Exception { + init(); + setupAllowContext(); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + mockClient.exists( + any(OperationContext.class), eq(Urn.createFromString(TEST_BUSINESS_ATTRIBUTE_URN)))) + .thenReturn(true); + + DeleteBusinessAttributeResolver resolver = new DeleteBusinessAttributeResolver(mockClient); + resolver.get(mockEnv).get(); + + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + any(OperationContext.class), + Mockito.eq(Urn.createFromString(TEST_BUSINESS_ATTRIBUTE_URN))); + } + + @Test + public void testUnauthorized() throws Exception { + init(); + setupDenyContext(); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + + DeleteBusinessAttributeResolver resolver = new DeleteBusinessAttributeResolver(mockClient); + AuthorizationException actualException = + expectThrows(AuthorizationException.class, () -> resolver.get(mockEnv).get()); + assertTrue( + actualException + .getMessage() + .equals( + "Unauthorized to perform this action. 
Please contact your DataHub administrator.")); + + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity( + any(OperationContext.class), + Mockito.eq(Urn.createFromString(TEST_BUSINESS_ATTRIBUTE_URN))); + } + + @Test + public void testEntityNotExists() throws Exception { + init(); + setupAllowContext(); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + mockClient.exists( + any(OperationContext.class), eq(Urn.createFromString(TEST_BUSINESS_ATTRIBUTE_URN)))) + .thenReturn(false); + + DeleteBusinessAttributeResolver resolver = new DeleteBusinessAttributeResolver(mockClient); + RuntimeException actualException = + expectThrows(RuntimeException.class, () -> resolver.get(mockEnv).get()); + assertTrue( + actualException + .getMessage() + .equals(String.format("This urn does not exist: %s", TEST_BUSINESS_ATTRIBUTE_URN))); + + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity( + any(OperationContext.class), + Mockito.eq(Urn.createFromString(TEST_BUSINESS_ATTRIBUTE_URN))); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/RemoveBusinessAttributeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/RemoveBusinessAttributeResolverTest.java new file mode 100644 index 00000000000000..3e7df667160624 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/RemoveBusinessAttributeResolverTest.java @@ -0,0 +1,122 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.expectThrows; + +import 
com.google.common.collect.ImmutableList; +import com.linkedin.businessattribute.BusinessAttributeAssociation; +import com.linkedin.businessattribute.BusinessAttributes; +import com.linkedin.common.urn.BusinessAttributeUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AddBusinessAttributeInput; +import com.linkedin.datahub.graphql.generated.ResourceRefInput; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.net.URISyntaxException; +import java.util.concurrent.ExecutionException; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class RemoveBusinessAttributeResolverTest { + private static final String BUSINESS_ATTRIBUTE_URN = + "urn:li:businessAttribute:7d0c4283-de02-4043-aaf2-698b04274658"; + private static final String RESOURCE_URN = + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,SampleCypressHiveDataset,PROD),field_bar)"; + private EntityService mockService; + private QueryContext mockContext; + private DataFetchingEnvironment mockEnv; + + @BeforeMethod + private void init() { + mockService = getMockEntityService(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + } + + private void setupAllowContext() { + mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + @Test + public void testSuccess() throws Exception { + setupAllowContext(); + + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(addBusinessAttributeInput()); + + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(Urn.createFromString(RESOURCE_URN)), + eq(Constants.BUSINESS_ATTRIBUTE_ASPECT), + eq(0L))) + 
.thenReturn(businessAttributes()); + + RemoveBusinessAttributeResolver resolver = new RemoveBusinessAttributeResolver(mockService); + resolver.get(mockEnv).get(); + + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + any(OperationContext.class), Mockito.any(AspectsBatchImpl.class), eq(false)); + } + + @Test + public void testBusinessAttributeNotAdded() throws Exception { + setupAllowContext(); + AddBusinessAttributeInput input = addBusinessAttributeInput(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(Urn.createFromString(RESOURCE_URN)), + eq(Constants.BUSINESS_ATTRIBUTE_ASPECT), + eq(0L))) + .thenReturn(new BusinessAttributes()); + + RemoveBusinessAttributeResolver resolver = new RemoveBusinessAttributeResolver(mockService); + ExecutionException actualException = + expectThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + assertTrue( + actualException + .getCause() + .getMessage() + .equals( + String.format( + "Failed to remove Business Attribute with urn %s from resources %s", + input.getBusinessAttributeUrn(), input.getResourceUrn()))); + + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + any(OperationContext.class), Mockito.any(AspectsBatchImpl.class), eq(false)); + } + + public AddBusinessAttributeInput addBusinessAttributeInput() { + AddBusinessAttributeInput addBusinessAttributeInput = new AddBusinessAttributeInput(); + addBusinessAttributeInput.setBusinessAttributeUrn(BUSINESS_ATTRIBUTE_URN); + addBusinessAttributeInput.setResourceUrn(resourceRefInput()); + return addBusinessAttributeInput; + } + + private ImmutableList resourceRefInput() { + ResourceRefInput resourceRefInput = new ResourceRefInput(); + resourceRefInput.setResourceUrn(RESOURCE_URN); + return ImmutableList.of(resourceRefInput); + } + + private BusinessAttributes businessAttributes() throws URISyntaxException { + BusinessAttributes 
businessAttributes = new BusinessAttributes(); + BusinessAttributeAssociation businessAttributeAssociation = new BusinessAttributeAssociation(); + businessAttributeAssociation.setBusinessAttributeUrn( + BusinessAttributeUrn.createFromString(BUSINESS_ATTRIBUTE_URN)); + businessAttributes.setBusinessAttribute(businessAttributeAssociation); + return businessAttributes; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateBusinessAttributeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateBusinessAttributeResolverTest.java new file mode 100644 index 00000000000000..e7530f30dded0c --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateBusinessAttributeResolverTest.java @@ -0,0 +1,260 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static com.linkedin.metadata.Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.expectThrows; + +import com.datahub.authentication.Authentication; +import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; +import com.linkedin.datahub.graphql.generated.UpdateBusinessAttributeInput; +import com.linkedin.datahub.graphql.resolvers.mutate.util.BusinessAttributeUtils; +import 
com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.entity.AspectUtils; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.service.BusinessAttributeService; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.schema.BooleanType; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UpdateBusinessAttributeResolverTest { + private static final String TEST_BUSINESS_ATTRIBUTE_NAME = "test-business-attribute"; + private static final String TEST_BUSINESS_ATTRIBUTE_DESCRIPTION = "test-description"; + private static final String TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED = + "test-business-attribute-updated"; + private static final String TEST_BUSINESS_ATTRIBUTE_DESCRIPTION_UPDATED = + "test-description-updated"; + private static final String TEST_BUSINESS_ATTRIBUTE_URN = + "urn:li:businessAttribute:7d0c4283-de02-4043-aaf2-698b04274658"; + private static final Urn TEST_BUSINESS_ATTRIBUTE_URN_OBJ = + UrnUtils.getUrn(TEST_BUSINESS_ATTRIBUTE_URN); + private EntityClient mockClient; + private QueryContext mockContext; + private DataFetchingEnvironment mockEnv; + private BusinessAttributeService businessAttributeService; + private Authentication mockAuthentication; + private SearchResult searchResult; + + private void init() { + mockClient = Mockito.mock(EntityClient.class); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + businessAttributeService = Mockito.mock(BusinessAttributeService.class); + mockAuthentication = Mockito.mock(Authentication.class); + 
searchResult = Mockito.mock(SearchResult.class); + } + + @Test + public void testSuccess() throws Exception { + init(); + setupAllowContext(); + final UpdateBusinessAttributeInput testInput = + new UpdateBusinessAttributeInput( + TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED, + TEST_BUSINESS_ATTRIBUTE_DESCRIPTION_UPDATED, + SchemaFieldDataType.NUMBER); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + mockClient.exists(any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ))) + .thenReturn(true); + Mockito.when( + businessAttributeService.getBusinessAttributeEntityResponse( + any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ))) + .thenReturn(getBusinessAttributeEntityResponse()); + Mockito.when( + mockClient.filter( + Mockito.any(OperationContext.class), + Mockito.any(String.class), + Mockito.any(Filter.class), + Mockito.isNull(), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(searchResult); + Mockito.when(searchResult.getNumEntities()).thenReturn(0); + Mockito.when( + mockClient.ingestProposal( + any(OperationContext.class), Mockito.any(MetadataChangeProposal.class))) + .thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + + UpdateBusinessAttributeResolver resolver = + new UpdateBusinessAttributeResolver(mockClient, businessAttributeService); + resolver.get(mockEnv).get(); + + // verify + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + any(OperationContext.class), + Mockito.argThat( + new CreateBusinessAttributeProposalMatcher(updatedMetadataChangeProposal()))); + } + + @Test + public void testNotExists() throws Exception { + init(); + setupAllowContext(); + final UpdateBusinessAttributeInput testInput = + new UpdateBusinessAttributeInput( + TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED, + TEST_BUSINESS_ATTRIBUTE_DESCRIPTION_UPDATED, + SchemaFieldDataType.NUMBER); + 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + mockClient.exists(any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ))) + .thenReturn(false); + + UpdateBusinessAttributeResolver resolver = + new UpdateBusinessAttributeResolver(mockClient, businessAttributeService); + RuntimeException expectedException = + expectThrows(RuntimeException.class, () -> resolver.get(mockEnv)); + assertTrue( + expectedException + .getMessage() + .equals(String.format("This urn does not exist: %s", TEST_BUSINESS_ATTRIBUTE_URN))); + + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + @Test + public void testNameConflict() throws Exception { + init(); + setupAllowContext(); + final UpdateBusinessAttributeInput testInput = + new UpdateBusinessAttributeInput( + TEST_BUSINESS_ATTRIBUTE_NAME, + TEST_BUSINESS_ATTRIBUTE_DESCRIPTION_UPDATED, + SchemaFieldDataType.NUMBER); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + mockClient.exists(any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ))) + .thenReturn(true); + Mockito.when( + businessAttributeService.getBusinessAttributeEntityResponse( + any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ))) + .thenReturn(getBusinessAttributeEntityResponse()); + Mockito.when( + mockClient.filter( + Mockito.any(OperationContext.class), + Mockito.any(String.class), + Mockito.any(Filter.class), + Mockito.isNull(), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(searchResult); + Mockito.when(searchResult.getNumEntities()).thenReturn(1); + + UpdateBusinessAttributeResolver resolver = + new UpdateBusinessAttributeResolver(mockClient, businessAttributeService); + + 
ExecutionException exception = + expectThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + + // Verify + assertTrue( + exception + .getCause() + .getMessage() + .equals( + "\"test-business-attribute\" already exists as Business Attribute. Please pick a unique name.")); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + @Test + public void testNotAuthorized() throws Exception { + init(); + setupDenyContext(); + final UpdateBusinessAttributeInput testInput = + new UpdateBusinessAttributeInput( + TEST_BUSINESS_ATTRIBUTE_NAME, + TEST_BUSINESS_ATTRIBUTE_DESCRIPTION_UPDATED, + SchemaFieldDataType.NUMBER); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + + UpdateBusinessAttributeResolver resolver = + new UpdateBusinessAttributeResolver(mockClient, businessAttributeService); + AuthorizationException exception = + expectThrows(AuthorizationException.class, () -> resolver.get(mockEnv)); + + assertTrue( + exception + .getMessage() + .equals( + "Unauthorized to perform this action. 
Please contact your DataHub administrator.")); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + private EntityResponse getBusinessAttributeEntityResponse() throws Exception { + Map result = new HashMap<>(); + EnvelopedAspectMap map = new EnvelopedAspectMap(); + BusinessAttributeInfo businessAttributeInfo = businessAttributeInfo(); + map.put( + BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(businessAttributeInfo.data()))); + EntityResponse entityResponse = new EntityResponse(); + entityResponse.setAspects(map); + entityResponse.setUrn(Urn.createFromString(TEST_BUSINESS_ATTRIBUTE_URN)); + return entityResponse; + } + + private MetadataChangeProposal updatedMetadataChangeProposal() { + BusinessAttributeInfo info = new BusinessAttributeInfo(); + info.setFieldPath(TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED); + info.setName(TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED); + info.setDescription(TEST_BUSINESS_ATTRIBUTE_DESCRIPTION_UPDATED); + info.setType( + BusinessAttributeUtils.mapSchemaFieldDataType(SchemaFieldDataType.BOOLEAN), + SetMode.IGNORE_NULL); + return AspectUtils.buildMetadataChangeProposal( + TEST_BUSINESS_ATTRIBUTE_URN_OBJ, BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, info); + } + + private void setupAllowContext() { + mockContext = getMockAllowContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + private void setupDenyContext() { + mockContext = getMockDenyContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + private BusinessAttributeInfo businessAttributeInfo() { + BusinessAttributeInfo businessAttributeInfo = new BusinessAttributeInfo(); + businessAttributeInfo.setName(TEST_BUSINESS_ATTRIBUTE_NAME); + 
businessAttributeInfo.setFieldPath(TEST_BUSINESS_ATTRIBUTE_NAME); + businessAttributeInfo.setDescription(TEST_BUSINESS_ATTRIBUTE_DESCRIPTION); + com.linkedin.schema.SchemaFieldDataType schemaFieldDataType = + new com.linkedin.schema.SchemaFieldDataType(); + schemaFieldDataType.setType( + com.linkedin.schema.SchemaFieldDataType.Type.create(new BooleanType())); + businessAttributeInfo.setType(schemaFieldDataType); + return businessAttributeInfo; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateNameResolverTest.java new file mode 100644 index 00000000000000..40e2b43e89688c --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/businessattribute/UpdateNameResolverTest.java @@ -0,0 +1,173 @@ +package com.linkedin.datahub.graphql.resolvers.businessattribute; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.isNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.expectThrows; + +import com.datahub.authentication.Authentication; +import com.linkedin.businessattribute.BusinessAttributeInfo; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.UpdateNameInput; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.UpdateNameResolver; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import 
com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.schema.BooleanType; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.ExecutionException; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpdateNameResolverTest { + private static final String TEST_BUSINESS_ATTRIBUTE_NAME = "test-business-attribute"; + private static final String TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED = + "test-business-attribute-updated"; + private static final String TEST_BUSINESS_ATTRIBUTE_DESCRIPTION = "test-description"; + private static final String TEST_BUSINESS_ATTRIBUTE_URN = + "urn:li:businessAttribute:7d0c4283-de02-4043-aaf2-698b04274658"; + private static final Urn TEST_BUSINESS_ATTRIBUTE_URN_OBJ = + UrnUtils.getUrn(TEST_BUSINESS_ATTRIBUTE_URN); + private EntityClient mockClient; + private EntityService mockService; + private QueryContext mockContext; + private DataFetchingEnvironment mockEnv; + private Authentication mockAuthentication; + private SearchResult searchResult; + + @BeforeMethod + private void init() { + mockClient = Mockito.mock(EntityClient.class); + mockService = getMockEntityService(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + mockAuthentication = Mockito.mock(Authentication.class); + searchResult = Mockito.mock(SearchResult.class); + } + + @Test + public void testSuccess() throws Exception { + setupAllowContext(); + UpdateNameInput testInput = + new UpdateNameInput(TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED, TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + 
mockService.exists( + any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ), eq(true))) + .thenReturn(true); + + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ), + eq(Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME), + eq(0L))) + .thenReturn(businessAttributeInfo()); + + Mockito.when( + mockClient.filter( + Mockito.any(OperationContext.class), + Mockito.any(String.class), + Mockito.any(Filter.class), + isNull(), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(searchResult); + Mockito.when(searchResult.getNumEntities()).thenReturn(0); + + BusinessAttributeInfo updatedBusinessAttributeInfo = businessAttributeInfo(); + updatedBusinessAttributeInfo.setName(TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED); + updatedBusinessAttributeInfo.setFieldPath(TEST_BUSINESS_ATTRIBUTE_NAME_UPDATED); + MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + TEST_BUSINESS_ATTRIBUTE_URN_OBJ, + Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME, + updatedBusinessAttributeInfo); + + UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); + resolver.get(mockEnv).get(); + + // verify + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + any(OperationContext.class), + Mockito.argThat(new CreateBusinessAttributeProposalMatcher(proposal)), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); + } + + @Test + public void testNameConflict() throws Exception { + setupAllowContext(); + UpdateNameInput testInput = + new UpdateNameInput(TEST_BUSINESS_ATTRIBUTE_NAME, TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getArgument("urn")).thenReturn(TEST_BUSINESS_ATTRIBUTE_URN); + Mockito.when( + mockService.exists( + any(OperationContext.class), eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + 
eq(TEST_BUSINESS_ATTRIBUTE_URN_OBJ), + eq(Constants.BUSINESS_ATTRIBUTE_INFO_ASPECT_NAME), + eq(0L))) + .thenReturn(businessAttributeInfo()); + + Mockito.when( + mockClient.filter( + Mockito.any(OperationContext.class), + Mockito.any(String.class), + Mockito.any(Filter.class), + isNull(), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(searchResult); + Mockito.when(searchResult.getNumEntities()).thenReturn(1); + + UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); + ExecutionException exception = + expectThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); + + assertTrue( + exception + .getCause() + .getMessage() + .equals( + "\"test-business-attribute\" already exists as Business Attribute. Please pick a unique name.")); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any(MetadataChangeProposal.class)); + } + + private void setupAllowContext() { + mockContext = getMockAllowContext(); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuthentication); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + } + + private BusinessAttributeInfo businessAttributeInfo() { + BusinessAttributeInfo businessAttributeInfo = new BusinessAttributeInfo(); + businessAttributeInfo.setName(TEST_BUSINESS_ATTRIBUTE_NAME); + businessAttributeInfo.setFieldPath(TEST_BUSINESS_ATTRIBUTE_NAME); + businessAttributeInfo.setDescription(TEST_BUSINESS_ATTRIBUTE_DESCRIPTION); + com.linkedin.schema.SchemaFieldDataType schemaFieldDataType = + new com.linkedin.schema.SchemaFieldDataType(); + schemaFieldDataType.setType( + com.linkedin.schema.SchemaFieldDataType.Type.create(new BooleanType())); + businessAttributeInfo.setType(schemaFieldDataType); + return businessAttributeInfo; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/connection/UpsertConnectionResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/connection/UpsertConnectionResolverTest.java new file mode 100644 index 00000000000000..5bc5332e711fdf --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/connection/UpsertConnectionResolverTest.java @@ -0,0 +1,128 @@ +package com.linkedin.datahub.graphql.resolvers.connection; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertThrows; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.common.DataPlatformInstance; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.connection.DataHubConnectionDetails; +import com.linkedin.connection.DataHubJsonConnection; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubConnection; +import com.linkedin.datahub.graphql.generated.DataHubConnectionDetailsType; +import com.linkedin.datahub.graphql.generated.DataHubJsonConnectionInput; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.UpsertDataHubConnectionInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.connection.ConnectionService; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.services.SecretService; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpsertConnectionResolverTest { + + private ConnectionService 
connectionService; + private SecretService secretService; + private UpsertConnectionResolver resolver; + + @BeforeMethod + public void setUp() { + connectionService = Mockito.mock(ConnectionService.class); + secretService = Mockito.mock(SecretService.class); + Mockito.when(secretService.encrypt("{}")).thenReturn("encrypted"); + Mockito.when(secretService.decrypt("encrypted")).thenReturn("{}"); + resolver = new UpsertConnectionResolver(connectionService, secretService); + } + + @Test + public void testGetAuthorized() throws Exception { + // Mock inputs + Urn connectionUrn = UrnUtils.getUrn("urn:li:dataHubConnection:test-id"); + Urn platformUrn = UrnUtils.getUrn("urn:li:dataPlatform:slack"); + + final UpsertDataHubConnectionInput input = new UpsertDataHubConnectionInput(); + input.setId(connectionUrn.getId()); + input.setPlatformUrn(platformUrn.toString()); + input.setType(DataHubConnectionDetailsType.JSON); + input.setName("test-name"); + input.setJson(new DataHubJsonConnectionInput("{}")); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + final DataHubConnectionDetails details = + new DataHubConnectionDetails() + .setType(com.linkedin.connection.DataHubConnectionDetailsType.JSON) + .setJson(new DataHubJsonConnection().setEncryptedBlob("encrypted")); + + final DataPlatformInstance platformInstance = + new DataPlatformInstance().setPlatform(platformUrn); + + when(connectionService.upsertConnection( + any(OperationContext.class), + Mockito.eq(input.getId()), + Mockito.eq(platformUrn), + Mockito.eq(details.getType()), + Mockito.eq(details.getJson()), + Mockito.any(String.class))) + .thenReturn(connectionUrn); + when(connectionService.getConnectionEntityResponse( + any(OperationContext.class), Mockito.eq(connectionUrn))) + .thenReturn( + new 
EntityResponse() + .setUrn(connectionUrn) + .setEntityName(Constants.DATAHUB_CONNECTION_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATAHUB_CONNECTION_DETAILS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATAHUB_CONNECTION_DETAILS_ASPECT_NAME) + .setValue(new Aspect(details.data())), + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME) + .setValue(new Aspect(platformInstance.data())))))); + + DataHubConnection actual = resolver.get(mockEnv).get(); + + Assert.assertEquals(actual.getType(), EntityType.DATAHUB_CONNECTION); + Assert.assertEquals(actual.getUrn(), connectionUrn.toString()); + Assert.assertEquals(actual.getPlatform().getUrn(), platformUrn.toString()); + Assert.assertEquals(actual.getDetails().getType(), input.getType()); + Assert.assertEquals(actual.getDetails().getJson().getBlob(), input.getJson().getBlob()); + } + + @Test + public void testGetUnAuthorized() { + // Mock inputs + Urn connectionUrn = UrnUtils.getUrn("urn:li:dataHubConnection:test-id"); + + final UpsertDataHubConnectionInput input = new UpsertDataHubConnectionInput(); + input.setId(connectionUrn.getId()); + input.setPlatformUrn(connectionUrn.toString()); + input.setType(DataHubConnectionDetailsType.JSON); + + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index 39a08ca26167d1..48732727762eea 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -20,66 +24,65 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class ContainerEntitiesResolverTest { - private static final ContainerEntitiesInput TEST_INPUT = new ContainerEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final ContainerEntitiesInput TEST_INPUT = + new ContainerEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); final String childUrn = "urn:li:dataset:(test,test,test)"; final String containerUrn = "urn:li:container:test-container"; - final Criterion filterCriterion = new Criterion() - .setField("container.keyword") - .setCondition(Condition.EQUAL) - .setValue(containerUrn); + final Criterion filterCriterion = + new Criterion() + .setField("container.keyword") + .setCondition(Condition.EQUAL) + .setValue(containerUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), - 
Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + any(), + Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(Collections.emptyList()), + Mockito.eq(null))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); ContainerEntitiesResolver resolver = new ContainerEntitiesResolver(mockClient); // Execute resolver - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -92,6 +95,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java index 92f8dfc4e1d67c..7e6c59dca7c224 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java @@ -1,5 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.container.Container; @@ -14,18 +20,12 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import io.datahubproject.test.metadata.context.TestOperationContexts; 
import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentContainersResolverTest { @Test @@ -33,6 +33,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -42,77 +44,88 @@ public void testGetSuccess() throws Exception { datasetEntity.setType(EntityType.DATASET); Mockito.when(mockEnv.getSource()).thenReturn(datasetEntity); - final Container parentContainer1 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); - final Container parentContainer2 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); + final Container parentContainer1 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); + final Container parentContainer2 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); Map datasetAspects = new HashMap<>(); - datasetAspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); + datasetAspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); Map parentContainer1Aspects = new HashMap<>(); - 
parentContainer1Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_schema").data() - ))); - parentContainer1Aspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - parentContainer2.data() - ))); + parentContainer1Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_schema").data()))); + parentContainer1Aspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer2.data()))); Map parentContainer2Aspects = new HashMap<>(); - parentContainer2Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_database").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(datasetUrn.getEntityType()), - Mockito.eq(datasetUrn), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer1.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(null), 
- Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer2.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + parentContainer2Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_database").data()))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(datasetUrn.getEntityType()), + Mockito.eq(datasetUrn), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer1.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer2.getContainer()) + .setAspects(new 
EnvelopedAspectMap(parentContainer2Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); ParentContainersResolver resolver = new ParentContainersResolver(mockClient); ParentContainersResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); - assertEquals(result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); - assertEquals(result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); + assertEquals( + result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); + assertEquals( + result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java index 6a9617ea41b448..837dec2f528ed3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java @@ -1,6 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static 
org.mockito.Mockito.when; + import com.datahub.authentication.Authentication; +import com.datahub.authorization.AuthorizationResult; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; import com.linkedin.dashboard.DashboardUsageStatistics; @@ -13,24 +20,22 @@ import com.linkedin.datahub.graphql.resolvers.dataset.DatasetStatsSummaryResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.EnvelopedAspect; +import com.linkedin.metadata.client.UsageStatsJavaClient; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.timeseries.GenericTable; -import com.linkedin.usage.UsageClient; import com.linkedin.usage.UsageQueryResult; import com.linkedin.usage.UsageQueryResultAggregations; import com.linkedin.usage.UsageTimeRange; import com.linkedin.usage.UserUsageCounts; import com.linkedin.usage.UserUsageCountsArray; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - public class DashboardStatsSummaryTest { private static final Dashboard TEST_SOURCE = new Dashboard(); @@ -50,6 +55,12 @@ public void testGetSuccess() throws Exception { // Execute resolver DashboardStatsSummaryResolver resolver = new DashboardStatsSummaryResolver(mockClient); QueryContext mockContext = Mockito.mock(QueryContext.class); + Authorizer mockAuthorizor = mock(Authorizer.class); + when(mockAuthorizor.authorize(any())) + .thenAnswer( + args -> + new AuthorizationResult(args.getArgument(0), AuthorizationResult.Type.ALLOW, "")); + when(mockContext.getAuthorizer()).thenReturn(mockAuthorizor); 
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getSource()).thenReturn(TEST_SOURCE); @@ -65,60 +76,56 @@ public void testGetSuccess() throws Exception { Assert.assertEquals((int) result.getUniqueUserCountLast30Days(), 2); // Validate the cache. -- First return a new result. - DashboardUsageStatistics newUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(40); - EnvelopedAspect newResult = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); + DashboardUsageStatistics newUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(40); + EnvelopedAspect newResult = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn(ImmutableList.of(newResult)); - - // Then verify that the new result is _not_ returned (cache hit) - DashboardStatsSummary cachedResult = resolver.get(mockEnv).get(); - Assert.assertEquals((int) cachedResult.getViewCount(), 20); - Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); - Assert.assertEquals((int) 
cachedResult.getUniqueUserCountLast30Days(), 2); + Mockito.when( + mockClient.getAspectValues( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(newResult)); } @Test public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); - - UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DASHBOARD_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); + + UsageStatsJavaClient mockClient = Mockito.mock(UsageStatsJavaClient.class); + Mockito.when( + mockClient.getUsageStats( + any(OperationContext.class), + Mockito.eq(TEST_DASHBOARD_URN), + Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -140,48 +147,48 
@@ private TimeseriesAspectService initTestAspectService() { TimeseriesAspectService mockClient = Mockito.mock(TimeseriesAspectService.class); // Mock fetching the latest absolute (snapshot) statistics - DashboardUsageStatistics latestUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(20); - EnvelopedAspect envelopedLatestStats = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); + DashboardUsageStatistics latestUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(20); + EnvelopedAspect envelopedLatestStats = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn( - ImmutableList.of(envelopedLatestStats) - ); - - Mockito.when(mockClient.getAggregatedStats( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.any(), - Mockito.any(Filter.class), - Mockito.any() - )).thenReturn( - new GenericTable().setRows(new StringArrayArray( - new StringArray(ImmutableList.of( - TEST_USER_URN_1, "10", "20", "30", "1", "1", "1" - )), - new StringArray(ImmutableList.of( - TEST_USER_URN_2, "20", "30", "40", "1", "1", "1" - )) - )) - .setColumnNames(new StringArray()) - .setColumnTypes(new StringArray()) - ); + Mockito.when( + mockClient.getAspectValues( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + 
Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(envelopedLatestStats)); + + Mockito.when( + mockClient.getAggregatedStats( + any(), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.any(), + Mockito.any(Filter.class), + Mockito.any())) + .thenReturn( + new GenericTable() + .setRows( + new StringArrayArray( + new StringArray( + ImmutableList.of(TEST_USER_URN_1, "10", "20", "30", "1", "1", "1")), + new StringArray( + ImmutableList.of(TEST_USER_URN_2, "20", "30", "40", "1", "1", "1")))) + .setColumnNames(new StringArray()) + .setColumnTypes(new StringArray())); return mockClient; } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/DataContractUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/DataContractUtilsTest.java new file mode 100644 index 00000000000000..18ede7c306e424 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/DataContractUtilsTest.java @@ -0,0 +1,63 @@ +package com.linkedin.datahub.graphql.resolvers.datacontract; + +import static org.mockito.Mockito.mock; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.datahub.authorization.AuthorizationRequest; +import com.datahub.authorization.AuthorizationResult; +import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import graphql.Assert; +import io.datahubproject.metadata.context.OperationContext; +import 
io.datahubproject.test.metadata.context.TestOperationContexts; +import java.util.Optional; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class DataContractUtilsTest { + + @Test + public void testCanEditDataContract() { + Urn testUrn = UrnUtils.getUrn("urn:li:dataContract:test"); + boolean result = + DataContractUtils.canEditDataContract( + new QueryContext() { + @Override + public boolean isAuthenticated() { + return true; + } + + @Override + public Authentication getAuthentication() { + Authentication auth = new Authentication(new Actor(ActorType.USER, "test"), "TEST"); + return auth; + } + + @Override + public Authorizer getAuthorizer() { + Authorizer authorizer = mock(Authorizer.class); + Mockito.when(authorizer.authorize(Mockito.any(AuthorizationRequest.class))) + .thenReturn( + new AuthorizationResult( + new AuthorizationRequest( + "TEST", "test", Optional.of(new EntitySpec("dataset", "test"))), + AuthorizationResult.Type.ALLOW, + "TEST")); + return authorizer; + } + + @Override + public OperationContext getOperationContext() { + return TestOperationContexts.userContextNoSearchAuthorization( + getAuthorizer(), getAuthentication()); + } + }, + testUrn); + Assert.assertTrue(result); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/EntityDataContractResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/EntityDataContractResolverTest.java new file mode 100644 index 00000000000000..8b757a24d6566f --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/EntityDataContractResolverTest.java @@ -0,0 +1,206 @@ +package com.linkedin.datahub.graphql.resolvers.datacontract; + +import static com.linkedin.datahub.graphql.resolvers.datacontract.EntityDataContractResolver.*; +import static org.mockito.ArgumentMatchers.nullable; +import static org.testng.Assert.*; + +import 
com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.EntityRelationship; +import com.linkedin.common.EntityRelationshipArray; +import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datacontract.DataContractProperties; +import com.linkedin.datacontract.DataContractState; +import com.linkedin.datacontract.DataContractStatus; +import com.linkedin.datacontract.DataQualityContract; +import com.linkedin.datacontract.DataQualityContractArray; +import com.linkedin.datacontract.FreshnessContract; +import com.linkedin.datacontract.FreshnessContractArray; +import com.linkedin.datacontract.SchemaContract; +import com.linkedin.datacontract.SchemaContractArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.key.DataContractKey; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class EntityDataContractResolverTest { + + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,test,PROD)"); + private static final Urn TEST_DATA_CONTRACT_URN = 
UrnUtils.getUrn("urn:li:dataContract:test"); + private static final Urn TEST_QUALITY_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:quality"); + private static final Urn TEST_FRESHNESS_ASSERTION_URN = + UrnUtils.getUrn("urn:li:assertion:freshness"); + private static final Urn TEST_SCHEMA_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:schema"); + + @Test + public void testGetSuccessOneContract() throws Exception { + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); + + Mockito.when( + mockGraphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN.toString()), + Mockito.eq(ImmutableList.of(CONTRACT_FOR_RELATIONSHIP)), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(1), + Mockito.anyString())) + .thenReturn( + new EntityRelationships() + .setTotal(1) + .setCount(1) + .setStart(0) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setType(CONTRACT_FOR_RELATIONSHIP) + .setEntity(TEST_DATA_CONTRACT_URN) + .setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setTime(0L)))))); + + Map dataContractAspects = new HashMap<>(); + + // 1. Key Aspect + dataContractAspects.put( + Constants.DATA_CONTRACT_KEY_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new DataContractKey().setId("test").data()))); + + // 2. Properties Aspect. 
+ DataContractProperties expectedProperties = + new DataContractProperties() + .setEntity(TEST_DATASET_URN) + .setDataQuality( + new DataQualityContractArray( + ImmutableList.of( + new DataQualityContract().setAssertion(TEST_QUALITY_ASSERTION_URN)))) + .setFreshness( + new FreshnessContractArray( + ImmutableList.of( + new FreshnessContract().setAssertion(TEST_FRESHNESS_ASSERTION_URN)))) + .setSchema( + new SchemaContractArray( + ImmutableList.of( + new SchemaContract().setAssertion(TEST_SCHEMA_ASSERTION_URN)))); + + dataContractAspects.put( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(expectedProperties.data()))); + + // 3. Status Aspect + DataContractStatus expectedStatus = new DataContractStatus().setState(DataContractState.ACTIVE); + + dataContractAspects.put( + Constants.DATA_CONTRACT_STATUS_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(expectedStatus.data()))); + + Mockito.when( + mockClient.getV2( + nullable(OperationContext.class), + Mockito.eq(Constants.DATA_CONTRACT_ENTITY_NAME), + Mockito.eq(TEST_DATA_CONTRACT_URN), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(Constants.DATA_CONTRACT_ENTITY_NAME) + .setUrn(TEST_DATA_CONTRACT_URN) + .setAspects(new EnvelopedAspectMap(dataContractAspects))); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Dataset parentDataset = new Dataset(); + parentDataset.setUrn(TEST_DATASET_URN.toString()); + Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + EntityDataContractResolver resolver = + new EntityDataContractResolver(mockClient, 
mockGraphClient); + DataContract result = resolver.get(mockEnv).get(); + + // Assert that the result we get matches the expectations. + assertEquals(result.getUrn(), TEST_DATA_CONTRACT_URN.toString()); + assertEquals(result.getType(), EntityType.DATA_CONTRACT); + + // Verify Properties + assertEquals(result.getProperties().getDataQuality().size(), 1); + assertEquals(result.getProperties().getFreshness().size(), 1); + assertEquals(result.getProperties().getSchema().size(), 1); + assertEquals( + result.getProperties().getDataQuality().get(0).getAssertion().getUrn(), + TEST_QUALITY_ASSERTION_URN.toString()); + assertEquals( + result.getProperties().getFreshness().get(0).getAssertion().getUrn(), + TEST_FRESHNESS_ASSERTION_URN.toString()); + assertEquals( + result.getProperties().getSchema().get(0).getAssertion().getUrn(), + TEST_SCHEMA_ASSERTION_URN.toString()); + + // Verify Status + assertEquals(result.getStatus().getState().toString(), expectedStatus.getState().toString()); + } + + @Test + public void testGetSuccessNoContracts() throws Exception { + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + EntityClient mockClient = Mockito.mock(EntityClient.class); + + Mockito.when( + mockGraphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN.toString()), + Mockito.eq(ImmutableList.of(CONTRACT_FOR_RELATIONSHIP)), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(1), + Mockito.anyString())) + .thenReturn( + new EntityRelationships() + .setTotal(0) + .setCount(0) + .setStart(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); + + EntityDataContractResolver resolver = + new EntityDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); + 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Dataset parentDataset = new Dataset(); + parentDataset.setUrn(TEST_DATASET_URN.toString()); + Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + DataContract result = resolver.get(mockEnv).get(); + + assertNull(result); + Mockito.verifyNoMoreInteractions(mockClient); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/UpsertDataContractResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/UpsertDataContractResolverTest.java new file mode 100644 index 00000000000000..bf01b54c7ed726 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/datacontract/UpsertDataContractResolverTest.java @@ -0,0 +1,419 @@ +package com.linkedin.datahub.graphql.resolvers.datacontract; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.datacontract.EntityDataContractResolver.*; +import static com.linkedin.metadata.utils.SystemMetadataUtils.createDefaultSystemMetadata; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.EntityRelationship; +import com.linkedin.common.EntityRelationshipArray; +import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringMap; +import com.linkedin.datacontract.DataContractProperties; +import com.linkedin.datacontract.DataContractStatus; +import com.linkedin.datacontract.DataQualityContract; +import com.linkedin.datacontract.DataQualityContractArray; +import 
com.linkedin.datacontract.FreshnessContract; +import com.linkedin.datacontract.FreshnessContractArray; +import com.linkedin.datacontract.SchemaContract; +import com.linkedin.datacontract.SchemaContractArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.DataContractState; +import com.linkedin.datahub.graphql.generated.DataQualityContractInput; +import com.linkedin.datahub.graphql.generated.FreshnessContractInput; +import com.linkedin.datahub.graphql.generated.SchemaContractInput; +import com.linkedin.datahub.graphql.generated.UpsertDataContractInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.AspectType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.key.DataContractKey; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.CompletionException; +import java.util.stream.Collectors; +import org.mockito.ArgumentCaptor; +import org.mockito.Captor; +import org.mockito.Mockito; +import org.mockito.MockitoAnnotations; +import org.testng.Assert; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class UpsertDataContractResolverTest { + + private static final Urn TEST_CONTRACT_URN = 
UrnUtils.getUrn("urn:li:dataContract:test-id"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,test,PROD)"); + private static final Urn TEST_FRESHNESS_ASSERTION_URN = + UrnUtils.getUrn("urn:li:assertion:freshness"); + private static final Urn TEST_SCHEMA_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:schema"); + private static final Urn TEST_QUALITY_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:quality"); + + private static final UpsertDataContractInput TEST_CREATE_INPUT = + new UpsertDataContractInput( + TEST_DATASET_URN.toString(), + ImmutableList.of(new FreshnessContractInput(TEST_FRESHNESS_ASSERTION_URN.toString())), + ImmutableList.of(new SchemaContractInput(TEST_SCHEMA_ASSERTION_URN.toString())), + ImmutableList.of(new DataQualityContractInput(TEST_QUALITY_ASSERTION_URN.toString())), + DataContractState.PENDING, + "test-id"); + + private static final UpsertDataContractInput TEST_VALID_UPDATE_INPUT = + new UpsertDataContractInput( + TEST_DATASET_URN.toString(), + ImmutableList.of(new FreshnessContractInput(TEST_FRESHNESS_ASSERTION_URN.toString())), + ImmutableList.of(new SchemaContractInput(TEST_SCHEMA_ASSERTION_URN.toString())), + ImmutableList.of(new DataQualityContractInput(TEST_QUALITY_ASSERTION_URN.toString())), + DataContractState.ACTIVE, + null); + + private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); + + @Captor private ArgumentCaptor> proposalCaptor; + + @BeforeTest + public void init() { + MockitoAnnotations.openMocks(this); + } + + @Test + public void testGetSuccessCreate() throws Exception { + // Expected results + final DataContractKey key = new DataContractKey(); + key.setId("test-id"); + final Urn dataContractUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, Constants.DATA_CONTRACT_ENTITY_NAME); + + final DataContractStatus status = new DataContractStatus(); + status.setState(com.linkedin.datacontract.DataContractState.PENDING); + + final 
DataContractProperties props = new DataContractProperties(); + props.setEntity(TEST_DATASET_URN); + props.setDataQuality( + new DataQualityContractArray( + ImmutableList.of(new DataQualityContract().setAssertion(TEST_QUALITY_ASSERTION_URN)))); + props.setFreshness( + new FreshnessContractArray( + ImmutableList.of(new FreshnessContract().setAssertion(TEST_FRESHNESS_ASSERTION_URN)))); + props.setSchema( + new SchemaContractArray( + ImmutableList.of(new SchemaContract().setAssertion(TEST_SCHEMA_ASSERTION_URN)))); + + // Create resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + initMockGraphClient(mockGraphClient, null); + initMockEntityClient(mockClient, null, props); // No existing contract + UpsertDataContractResolver resolver = + new UpsertDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_CREATE_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + DataContract result = resolver.get(mockEnv).get(); + + final MetadataChangeProposal propertiesProposal = new MetadataChangeProposal(); + propertiesProposal.setEntityUrn(dataContractUrn); + propertiesProposal.setEntityType(Constants.DATA_CONTRACT_ENTITY_NAME); + propertiesProposal.setSystemMetadata( + createDefaultSystemMetadata() + .setProperties(new StringMap(ImmutableMap.of("appSource", "ui")))); + propertiesProposal.setAspectName(Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME); + propertiesProposal.setAspect(GenericRecordUtils.serializeAspect(props)); + propertiesProposal.setChangeType(ChangeType.UPSERT); + + final MetadataChangeProposal statusProposal = new MetadataChangeProposal(); + statusProposal.setEntityUrn(dataContractUrn); + 
statusProposal.setEntityType(Constants.DATA_CONTRACT_ENTITY_NAME); + statusProposal.setSystemMetadata( + createDefaultSystemMetadata() + .setProperties(new StringMap(ImmutableMap.of("appSource", "ui")))); + statusProposal.setAspectName(Constants.DATA_CONTRACT_STATUS_ASPECT_NAME); + statusProposal.setAspect(GenericRecordUtils.serializeAspect(status)); + statusProposal.setChangeType(ChangeType.UPSERT); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + any(OperationContext.class), proposalCaptor.capture(), Mockito.eq(false)); + + // check has time + Assert.assertTrue( + proposalCaptor.getValue().stream() + .allMatch(prop -> prop.getSystemMetadata().getLastObserved() > 0L)); + + // check without time + Assert.assertEquals( + proposalCaptor.getValue().stream() + .map(m -> m.getSystemMetadata().setLastObserved(0)) + .collect(Collectors.toList()), + List.of(propertiesProposal, statusProposal).stream() + .map(m -> m.getSystemMetadata().setLastObserved(0)) + .collect(Collectors.toList())); + + Assert.assertEquals(result.getUrn(), TEST_CONTRACT_URN.toString()); + } + + @Test + public void testGetSuccessUpdate() throws Exception { + + DataContractProperties props = new DataContractProperties(); + props.setEntity(TEST_DATASET_URN); + props.setDataQuality( + new DataQualityContractArray( + ImmutableList.of(new DataQualityContract().setAssertion(TEST_QUALITY_ASSERTION_URN)))); + props.setFreshness( + new FreshnessContractArray( + ImmutableList.of(new FreshnessContract().setAssertion(TEST_FRESHNESS_ASSERTION_URN)))); + props.setSchema( + new SchemaContractArray( + ImmutableList.of(new SchemaContract().setAssertion(TEST_SCHEMA_ASSERTION_URN)))); + + DataContractStatus status = new DataContractStatus(); + status.setState(com.linkedin.datacontract.DataContractState.ACTIVE); + + // Update resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + 
initMockGraphClient(mockGraphClient, TEST_CONTRACT_URN); + initMockEntityClient(mockClient, TEST_CONTRACT_URN, props); // Contract Exists + UpsertDataContractResolver resolver = + new UpsertDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_VALID_UPDATE_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + DataContract result = resolver.get(mockEnv).get(); + + final MetadataChangeProposal propertiesProposal = new MetadataChangeProposal(); + propertiesProposal.setEntityUrn(TEST_CONTRACT_URN); + propertiesProposal.setEntityType(Constants.DATA_CONTRACT_ENTITY_NAME); + propertiesProposal.setSystemMetadata( + createDefaultSystemMetadata() + .setProperties(new StringMap(ImmutableMap.of("appSource", "ui")))); + propertiesProposal.setAspectName(Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME); + propertiesProposal.setAspect(GenericRecordUtils.serializeAspect(props)); + propertiesProposal.setChangeType(ChangeType.UPSERT); + + final MetadataChangeProposal statusProposal = new MetadataChangeProposal(); + statusProposal.setEntityUrn(TEST_CONTRACT_URN); + statusProposal.setEntityType(Constants.DATA_CONTRACT_ENTITY_NAME); + statusProposal.setSystemMetadata( + createDefaultSystemMetadata() + .setProperties(new StringMap(ImmutableMap.of("appSource", "ui")))); + statusProposal.setAspectName(Constants.DATA_CONTRACT_STATUS_ASPECT_NAME); + statusProposal.setAspect(GenericRecordUtils.serializeAspect(status)); + statusProposal.setChangeType(ChangeType.UPSERT); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals( + any(OperationContext.class), proposalCaptor.capture(), Mockito.eq(false)); + + // check has time + Assert.assertTrue( + proposalCaptor.getValue().stream() + .allMatch(prop -> 
prop.getSystemMetadata().getLastObserved() > 0L)); + + // check without time + Assert.assertEquals( + proposalCaptor.getValue().stream() + .map(m -> m.getSystemMetadata().setLastObserved(0)) + .collect(Collectors.toList()), + List.of(propertiesProposal, statusProposal).stream() + .map(m -> m.getSystemMetadata().setLastObserved(0)) + .collect(Collectors.toList())); + + Assert.assertEquals(result.getUrn(), TEST_CONTRACT_URN.toString()); + } + + @Test + public void testGetFailureEntityDoesNotExist() throws Exception { + // Update resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + initMockGraphClient(mockGraphClient, TEST_CONTRACT_URN); + Mockito.when(mockClient.exists(any(OperationContext.class), Mockito.eq(TEST_DATASET_URN))) + .thenReturn(false); + UpsertDataContractResolver resolver = + new UpsertDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_CREATE_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } + + @Test + public void testGetFailureAssertionDoesNotExist() throws Exception { + // Update resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + initMockGraphClient(mockGraphClient, TEST_CONTRACT_URN); + Mockito.when(mockClient.exists(any(OperationContext.class), Mockito.eq(TEST_DATASET_URN))) + .thenReturn(true); + Mockito.when( + mockClient.exists( + any(OperationContext.class), Mockito.eq(TEST_FRESHNESS_ASSERTION_URN))) + .thenReturn(false); + Mockito.when( + mockClient.exists(any(OperationContext.class), Mockito.eq(TEST_QUALITY_ASSERTION_URN))) + 
.thenReturn(false); + Mockito.when( + mockClient.exists(any(OperationContext.class), Mockito.eq(TEST_SCHEMA_ASSERTION_URN))) + .thenReturn(false); + UpsertDataContractResolver resolver = + new UpsertDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_CREATE_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } + + @Test + public void testGetUnauthorized() throws Exception { + // Create resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + UpsertDataContractResolver resolver = + new UpsertDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_CREATE_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(any(OperationContext.class), Mockito.any()); + } + + @Test + public void testGetEntityClientException() throws Exception { + // Create resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + GraphClient mockGraphClient = Mockito.mock(GraphClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(OperationContext.class), Mockito.any(), Mockito.eq(false)); + UpsertDataContractResolver resolver = + new UpsertDataContractResolver(mockClient, mockGraphClient); + + // Execute resolver + DataFetchingEnvironment 
mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_CREATE_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } + + private void initMockGraphClient(GraphClient client, Urn existingContractUrn) { + if (existingContractUrn != null) { + Mockito.when( + client.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN.toString()), + Mockito.eq(ImmutableList.of(CONTRACT_FOR_RELATIONSHIP)), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(1), + Mockito.anyString())) + .thenReturn( + new EntityRelationships() + .setTotal(1) + .setCount(1) + .setStart(0) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(existingContractUrn) + .setType(CONTRACT_FOR_RELATIONSHIP) + .setCreated( + new AuditStamp().setActor(TEST_ACTOR_URN).setTime(0L)))))); + } else { + Mockito.when( + client.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN.toString()), + Mockito.eq(ImmutableList.of(CONTRACT_FOR_RELATIONSHIP)), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(1), + Mockito.anyString())) + .thenReturn( + new EntityRelationships() + .setTotal(0) + .setCount(0) + .setStart(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); + } + } + + private void initMockEntityClient( + EntityClient client, Urn existingContractUrn, DataContractProperties newContractProperties) + throws Exception { + if (existingContractUrn != null) { + Mockito.when(client.exists(any(OperationContext.class), Mockito.eq(existingContractUrn))) + .thenReturn(true); + } + Mockito.when(client.exists(any(OperationContext.class), Mockito.eq(TEST_DATASET_URN))) + .thenReturn(true); + Mockito.when(client.exists(any(OperationContext.class), 
Mockito.eq(TEST_QUALITY_ASSERTION_URN))) + .thenReturn(true); + Mockito.when( + client.exists(any(OperationContext.class), Mockito.eq(TEST_FRESHNESS_ASSERTION_URN))) + .thenReturn(true); + Mockito.when(client.exists(any(OperationContext.class), Mockito.eq(TEST_SCHEMA_ASSERTION_URN))) + .thenReturn(true); + + Mockito.when( + client.getV2( + any(OperationContext.class), + Mockito.eq(Constants.DATA_CONTRACT_ENTITY_NAME), + Mockito.eq(TEST_CONTRACT_URN), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_CONTRACT_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setType(AspectType.VERSIONED) + .setName(Constants.DATA_CONTRACT_ENTITY_NAME) + .setValue(new Aspect(newContractProperties.data())))))); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java deleted file mode 100644 index ea9ab2a1b768b7..00000000000000 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ /dev/null @@ -1,215 +0,0 @@ -package com.linkedin.datahub.graphql.resolvers.dataset; - -import com.datahub.authentication.Authentication; -import com.google.common.collect.ImmutableList; -import com.linkedin.common.EntityRelationship; -import com.linkedin.common.EntityRelationshipArray; -import com.linkedin.common.EntityRelationships; -import com.linkedin.common.urn.Urn; -import com.linkedin.data.template.StringArray; -import com.linkedin.data.template.StringArrayArray; -import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.Health; -import com.linkedin.datahub.graphql.generated.HealthStatus; -import com.linkedin.metadata.Constants; -import 
com.linkedin.metadata.graph.GraphClient; -import com.linkedin.metadata.query.filter.RelationshipDirection; -import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import com.linkedin.timeseries.GenericTable; -import graphql.schema.DataFetchingEnvironment; -import java.util.Collections; -import java.util.List; -import org.mockito.Mockito; -import org.testng.annotations.Test; - -import static org.testng.Assert.*; - - -public class DatasetHealthResolverTest { - - private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; - private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; - private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:test-guid-2"; - - - @Test - public void testGetSuccessHealthy() throws Exception { - GraphClient graphClient = Mockito.mock(GraphClient.class); - TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts")) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )) - ) - )) - ); - - 
DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); - - // Execute resolver - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - Dataset parentDataset = new Dataset(); - parentDataset.setUrn(TEST_DATASET_URN); - Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); - - List result = resolver.get(mockEnv).get(); - assertNotNull(result); - assertEquals(result.size(), 1); - assertEquals(result.get(0).getStatus(), HealthStatus.PASS); - } - - @Test - public void testGetSuccessNullHealth() throws Exception { - GraphClient graphClient = Mockito.mock(GraphClient.class); - TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - - // 0 associated assertions, meaning we don't report any health. 
- Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(0) - .setRelationships(new EntityRelationshipArray(Collections.emptyList())) - ); - - DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); - - // Execute resolver - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - Dataset parentDataset = new Dataset(); - parentDataset.setUrn(TEST_DATASET_URN); - Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); - - List result = resolver.get(mockEnv).get(); - assertEquals(result.size(), 0); - - Mockito.verify(mockAspectService, Mockito.times(0)).getAggregatedStats( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); - } - - @Test - public void testGetSuccessUnhealthy() throws Exception { - GraphClient graphClient = Mockito.mock(GraphClient.class); - TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(2) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts"), - new 
EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) - .setType("Asserts") - ) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )), - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN_2, "FAILURE", "0" - )) - ) - )) - ); - - DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); - - // Execute resolver - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - Dataset parentDataset = new Dataset(); - parentDataset.setUrn(TEST_DATASET_URN); - Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); - - List result = resolver.get(mockEnv).get(); - assertEquals(result.size(), 1); - assertEquals(result.get(0).getStatus(), HealthStatus.FAIL); - } -} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java index 013e23b779c517..f8a7e4fc6a13c8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataset; +import static org.mockito.ArgumentMatchers.any; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -8,22 +10,23 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetStatsSummary; -import com.linkedin.usage.UsageClient; +import com.linkedin.metadata.client.UsageStatsJavaClient; import com.linkedin.usage.UsageQueryResult; import com.linkedin.usage.UsageQueryResultAggregations; import com.linkedin.usage.UsageTimeRange; import com.linkedin.usage.UserUsageCounts; import com.linkedin.usage.UserUsageCountsArray; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.Test; - public class DatasetStatsSummaryResolverTest { private static final Dataset TEST_SOURCE = new Dataset(); - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; private static final String TEST_USER_URN_1 = "urn:li:corpuser:test1"; private static final String TEST_USER_URN_2 = "urn:li:corpuser:test2"; @@ -35,28 +38,29 @@ public class DatasetStatsSummaryResolverTest { public void testGetSuccess() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - 
.setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); - - UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(testResult); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); + + UsageStatsJavaClient mockClient = Mockito.mock(UsageStatsJavaClient.class); + Mockito.when( + mockClient.getUsageStats( + any(OperationContext.class), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(testResult); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -65,9 +69,11 @@ public void testGetSuccess() throws Exception { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); AuthorizationResult mockAuthorizerResult = Mockito.mock(AuthorizationResult.class); Mockito.when(mockAuthorizerResult.getType()).thenReturn(AuthorizationResult.Type.ALLOW); - Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(mockAuthorizerResult); + Mockito.when(mockAuthorizer.authorize(any())).thenReturn(mockAuthorizerResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(Mockito.mock(OperationContext.class)); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getSource()).thenReturn(TEST_SOURCE); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -84,46 +90,41 @@ public void testGetSuccess() throws Exception { // Validate the cache. -- First return a new result. UsageQueryResult newResult = new UsageQueryResult(); newResult.setAggregations(new UsageQueryResultAggregations()); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(newResult); - - // Then verify that the new result is _not_ returned (cache hit) - DatasetStatsSummary cachedResult = resolver.get(mockEnv).get(); - Assert.assertEquals((int) cachedResult.getQueryCountLast30Days(), 10); - Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); - Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 5); + Mockito.when( + mockClient.getUsageStats( + any(OperationContext.class), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(newResult); } @Test public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); - - UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - 
Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); + + UsageStatsJavaClient mockClient = Mockito.mock(UsageStatsJavaClient.class); + Mockito.when( + mockClient.getUsageStats( + any(OperationContext.class), + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index bae6f27a854bc7..48b3dc5f6db943 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -1,7 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.delete; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -11,61 +16,66 @@ import 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateSoftDeletedResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingStatus() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) 
.thenReturn(null); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(true); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -74,67 +84,85 @@ public void testGetSuccessNoExistingStatus() 
throws Exception { public void testGetSuccessExistingStatus() throws Exception { final Status originalStatus = new Status().setRemoved(true); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(false); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), 
eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -145,13 +173,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); @@ -163,22 +193,24 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - 
Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index ce5a02bb573e18..265a1a2e0af5ba 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -1,7 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import 
com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -12,147 +17,182 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingDeprecation() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 0L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 0L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(0L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(0L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = 
MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - final Deprecation originalDeprecation = new Deprecation() - .setDeprecated(false) - .setNote("") - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + final Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setNote("") + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(1L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(1L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + 
final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput 
input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -162,15 +202,20 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -181,23 +226,28 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - 
Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index 5d30ae08d6dea2..ab180724da46df 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -1,6 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static 
org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.Deprecation; @@ -26,20 +32,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = new UpdateDeprecationInput( - TEST_ENTITY_URN, - true, - 0L, - "Test note" - ); + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = + new UpdateDeprecationInput(TEST_ENTITY_URN, true, 0L, "Test note"); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); @Test @@ -47,19 +45,23 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when( + 
mockClient.batchGetV2( + any(), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -67,48 +69,60 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation().setDeprecated(true).setDecommissionTime(0L).setNote("Test note").setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); - Mockito.verify(mockClient, 
Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - Deprecation originalDeprecation = new Deprecation().setDeprecated(false).setDecommissionTime(1L).setActor(TEST_ACTOR_URN).setNote(""); + Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setDecommissionTime(1L) + .setActor(TEST_ACTOR_URN) + .setNote(""); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDeprecation.data())) - ))))); - - EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when( + mockClient.batchGetV2( + any(), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + 
.setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDeprecation.data()))))))); + + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -116,28 +130,24 @@ public void testGetSuccessExistingDeprecation() throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setDecommissionTime(0L) - .setNote("Test note") - .setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); - Mockito.verify(mockService, Mockito.times(1)).exists( - 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + verifyIngestProposal(mockClient, 1, proposal); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -145,19 +155,23 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DEPRECATION_ASPECT_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when( + mockClient.batchGetV2( + any(), + eq(Constants.DATASET_ENTITY_NAME), + eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DEPRECATION_ASPECT_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); @@ -165,49 +179,45 @@ public void testGetFailureEntityDoesNotExist() throws Exception { QueryContext mockContext = getMockAllowContext(); 
Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); - 
Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + EntityService mockService = Mockito.mock(EntityService.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(TEST_DEPRECATION_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index 8cd3c71a21555b..1a9272c1335cf9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -1,7 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,205 +19,246 @@ import 
com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchSetDomainResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_DOMAIN_1_URN = "urn:li:domain:test-id-1"; private static final String TEST_DOMAIN_2_URN = "urn:li:domain:test-id-2"; @Test public void testGetSuccessNoExistingDomains() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - 
Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + 
final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)); } @Test public void testGetSuccessExistingDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); + + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new 
Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); proposal1.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_1)); proposal1.setEntityType(Constants.DATASET_ENTITY_NAME); proposal1.setAspectName(Constants.DOMAINS_ASPECT_NAME); proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true)); } @Test public void testGetSuccessUnsetDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + final Domains originalDomain = + new Domains() + .setDomains(new 
UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); + + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_2_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + 
ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureDomainDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + 
Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(false); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -222,31 +268,41 @@ public void testGetFailureDomainDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_DOMAIN_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_DOMAIN_1_URN)), eq(true))) + .thenReturn(true); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -256,15 +312,18 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, 
null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -275,23 +334,26 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java index 1ea84b99cfec3b..8f86e33158ad59 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java @@ -6,13 +6,12 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.mockito.ArgumentMatcher; - public class CreateDomainProposalMatcher implements ArgumentMatcher { private MetadataChangeProposal left; public CreateDomainProposalMatcher(MetadataChangeProposal left) { - this.left = left; + this.left = left; } @Override @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } private boolean domainPropertiesMatch(GenericAspect left, GenericAspect right) { - DomainProperties leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DomainProperties.class - ); - - DomainProperties rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - DomainProperties.class - ); + DomainProperties leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DomainProperties.class); + + DomainProperties rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DomainProperties.class); // Omit timestamp comparison. 
return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 560a3865ce9e1a..c0d74225a9cf1d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -1,6 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.domain; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,65 +20,45 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.DomainKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static 
com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; -import static org.testng.Assert.*; - - public class CreateDomainResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id"); - private static final CreateDomainInput TEST_INPUT = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - TEST_PARENT_DOMAIN_URN.toString() - ); + private static final CreateDomainInput TEST_INPUT = + new CreateDomainInput( + "test-id", "test-name", "test-description", TEST_PARENT_DOMAIN_URN.toString()); - private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = + new CreateDomainInput("test-id", "test-name", "test-description", null); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_DOMAIN_URN))).thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_PARENT_DOMAIN_URN))).thenReturn(true); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -81,14 +66,17 @@ public void testGetSuccess() throws Exception { 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -107,37 +95,33 @@ public void testGetSuccess() throws Exception { proposal.setChangeType(ChangeType.UPSERT); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + any(), Mockito.argThat(new CreateDomainProposalMatcher(proposal)), Mockito.eq(false)); } @Test public void testGetSuccessNoParentDomain() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_DOMAIN_URN))).thenReturn(false); 
QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_NO_PARENT_DOMAIN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -154,28 +138,20 @@ public void testGetSuccessNoParentDomain() throws Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(props)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + any(), Mockito.argThat(new CreateDomainProposalMatcher(proposal)), Mockito.eq(false)); } @Test public void testGetInvalidParent() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + 
Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_DOMAIN_URN))).thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_PARENT_DOMAIN_URN))).thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -188,34 +164,31 @@ public void testGetInvalidParent() throws Exception { @Test public void testGetNameConflict() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_DOMAIN_URN))).thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when(mockClient.exists(any(), Mockito.eq(TEST_PARENT_DOMAIN_URN))).thenReturn(true); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)) - )); + Mockito.when( 
+ mockClient.filter( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class))) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)))); DomainProperties domainProperties = new DomainProperties(); domainProperties.setDescription(TEST_INPUT.getDescription()); @@ -225,18 +198,18 @@ public void testGetNameConflict() throws Exception { EntityResponse entityResponse = new EntityResponse(); EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(domainProperties.data()))); + envelopedAspectMap.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(domainProperties.data()))); entityResponse.setAspects(envelopedAspectMap); Map entityResponseMap = new HashMap<>(); entityResponseMap.put(TEST_DOMAIN_URN, entityResponse); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class) - )).thenReturn(entityResponseMap); + Mockito.when( + mockClient.batchGetV2( + any(), Mockito.eq(Constants.DOMAIN_ENTITY_NAME), Mockito.any(), Mockito.any())) + .thenReturn(entityResponseMap); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } @@ -245,7 +218,7 @@ public void testGetNameConflict() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver @@ -255,19 +228,17 @@ public void 
testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), Mockito.eq(false)); + EntityService mockService = getMockEntityService(); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), Mockito.eq(false)); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver @@ -278,4 +249,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java index 9bcdbe6d2a0e0a..f2f4ccafffae7f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static 
org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -10,10 +13,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteDomainResolverTest { private static final String TEST_URN = "urn:li:domain:test-id"; @@ -30,15 +29,20 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has 0 child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + any(), + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1))) .thenReturn(new SearchResult().setNumEntities(0)); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(Urn.createFromString(TEST_URN))); } @Test @@ -53,14 +57,19 @@ public void testDeleteWithChildDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + any(), + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1))) .thenReturn(new SearchResult().setNumEntities(1)); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), Mockito.any()); } 
@Test @@ -76,8 +85,6 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), Mockito.any()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 93fe3d00171606..ad5d7f1ef6b06f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -7,7 +12,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; @@ -21,23 +26,16 @@ import com.linkedin.metadata.search.SearchResult; import 
com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.Collections; import java.util.stream.Collectors; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.testng.Assert.*; - - public class DomainEntitiesResolverTest { - private static final DomainEntitiesInput TEST_INPUT = new DomainEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final DomainEntitiesInput TEST_INPUT = + new DomainEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -47,41 +45,49 @@ public void testGetSuccess() throws Exception { final String childUrn = "urn:li:dataset:(test,test,test)"; final String domainUrn = "urn:li:domain:test-domain"; - final Criterion filterCriterion = new Criterion() - .setField("domains.keyword") - .setCondition(Condition.EQUAL) - .setValue(domainUrn); + final Criterion filterCriterion = + new Criterion() + .setField("domains.keyword") + .setCondition(Condition.EQUAL) + .setValue(domainUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList())), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + 
mockClient.searchAcrossEntities( + any(), + Mockito.eq( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList())), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(Collections.emptyList()), + Mockito.eq(null))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); DomainEntitiesResolver resolver = new DomainEntitiesResolver(mockClient); // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -95,6 +101,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index bd8a8f98de4974..c3b1a8c564855a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -1,6 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.domain; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -8,7 +13,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.DomainUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.SearchEntity; @@ -16,50 +20,48 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class ListDomainsResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = 
Urn.createFromTuple("domain", "test-parent-id"); - private static final ListDomainsInput TEST_INPUT = new ListDomainsInput( - 0, 20, null, TEST_PARENT_DOMAIN_URN.toString() - ); + private static final ListDomainsInput TEST_INPUT = + new ListDomainsInput(0, 20, null, TEST_PARENT_DOMAIN_URN.toString()); - private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = new ListDomainsInput( - 0, 20, null, null - ); + private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = + new ListDomainsInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), + Mockito.eq( + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -74,7 +76,8 @@ public void testGetSuccess() throws Exception { assertEquals((int) 
resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -82,22 +85,27 @@ public void testGetSuccessNoParentDomain() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(null)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(null)), + Mockito.eq( + Collections.singletonList( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -112,7 +120,8 @@ public void testGetSuccessNoParentDomain() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); 
assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -124,33 +133,33 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + any(), + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt()); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); // Execute resolver @@ -161,4 +170,4 @@ public void testGetEntityClientException() throws Exception { 
assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java index 4059c180b0eb03..07fad314747db8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.CorpuserUrn; @@ -17,59 +24,65 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class MoveDomainResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; private static final String PARENT_DOMAIN_URN = "urn:li:domain:00005397daf94708a8822b8106cfd451"; private static final String DOMAIN_URN = 
"urn:li:domain:11115397daf94708a8822b8106cfd451"; private static final MoveDomainInput INPUT = new MoveDomainInput(PARENT_DOMAIN_URN, DOMAIN_URN); - private static final MoveDomainInput INVALID_INPUT = new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); + private static final MoveDomainInput INVALID_INPUT = + new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(DOMAIN_URN)), + eq(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + eq(0L))) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(name, Urn.createFromString(PARENT_DOMAIN_URN))), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + name, Urn.createFromString(PARENT_DOMAIN_URN))), + Mockito.eq(null), + Mockito.any(Integer.class), + 
Mockito.any(Integer.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(name); properties.setParentDomain(Urn.createFromString(PARENT_DOMAIN_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); } @Test public void testGetSuccess() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true); + Mockito.when( + mockService.exists( + any(OperationContext.class), eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -77,18 +90,20 @@ public void testGetSuccess() throws Exception { setupTests(mockEnv, mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + any(), + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test public void testGetFailureEntityDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -97,10 +112,12 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(DOMAIN_URN)), + eq(DOMAIN_PROPERTIES_ASPECT_NAME), + eq(0))) .thenReturn(null); MoveDomainResolver resolver = new MoveDomainResolver(mockService, mockClient); @@ -110,9 +127,10 @@ public void testGetFailureEntityDoesNotExist() throws Exception { @Test public void testGetFailureParentDoesNotExist() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -125,9 +143,10 @@ public void testGetFailureParentDoesNotExist() throws Exception { @Test public void testGetFailureParentIsNotDomain() throws Exception { - EntityService mockService = Mockito.mock(EntityService.class); + EntityService mockService = Mockito.mock(EntityService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.when(mockService.exists(Urn.createFromString(PARENT_DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(PARENT_DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java index 7bd7c3afac001c..5bd837ce4082e0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertEquals; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,15 +17,12 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentDomainsResolverTest { @Test @@ -29,6 +30,8 @@ public void testGetSuccessForDomain() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = 
Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -38,58 +41,68 @@ public void testGetSuccessForDomain() throws Exception { domainEntity.setType(EntityType.DOMAIN); Mockito.when(mockEnv.getSource()).thenReturn(domainEntity); - final DomainProperties parentDomain1 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:11115397daf94708a8822b8106cfd451") - ).setName("test def"); - final DomainProperties parentDomain2 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:22225397daf94708a8822b8106cfd451") - ).setName("test def 2"); + final DomainProperties parentDomain1 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:11115397daf94708a8822b8106cfd451")) + .setName("test def"); + final DomainProperties parentDomain2 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:22225397daf94708a8822b8106cfd451")) + .setName("test def 2"); Map domainAspects = new HashMap<>(); - domainAspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); + domainAspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); Map parentDomain1Aspects = new HashMap<>(); - parentDomain1Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 1").setParentDomain(parentDomain2.getParentDomain()).data() - ))); + parentDomain1Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new DomainProperties() + .setName("domain parent 1") 
+ .setParentDomain(parentDomain2.getParentDomain()) + .data()))); Map parentDomain2Aspects = new HashMap<>(); - parentDomain2Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 2").data() - ))); + parentDomain2Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new DomainProperties().setName("domain parent 2").data()))); - Mockito.when(mockClient.getV2( - Mockito.eq(domainUrn.getEntityType()), - Mockito.eq(domainUrn), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(domainUrn.getEntityType()), + Mockito.eq(domainUrn), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain1.getParentDomain().getEntityType()), - Mockito.eq(parentDomain1.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentDomain1.getParentDomain().getEntityType()), + Mockito.eq(parentDomain1.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain2.getParentDomain().getEntityType()), - Mockito.eq(parentDomain2.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new 
EnvelopedAspectMap(parentDomain2Aspects))); + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentDomain2.getParentDomain().getEntityType()), + Mockito.eq(parentDomain2.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); ParentDomainsResolver resolver = new ParentDomainsResolver(mockClient); ParentDomainsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(3)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(3)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getDomains().get(0).getUrn(), parentDomain1.getParentDomain().toString()); assertEquals(result.getDomains().get(1).getUrn(), parentDomain2.getParentDomain().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index 92fb26288aa1dc..5437f1c860fde6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -1,6 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.domain; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import 
com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -26,14 +32,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; private static final String TEST_NEW_DOMAIN_URN = "urn:li:domain:test-id-2"; @@ -43,20 +45,25 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new 
EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -68,51 +75,57 @@ public void testGetSuccessNoExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)); } 
@Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); + + EntityService mockService = getMockEntityService(); + 
Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -124,23 +137,20 @@ public void testGetSuccessExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true)); } @Test @@ -149,20 +159,25 @@ public void testGetFailureDomainDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - 
Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(false); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(false); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -174,9 +189,7 @@ public void testGetFailureDomainDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, 
Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test @@ -185,20 +198,25 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_NEW_DOMAIN_URN))).thenReturn(true); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)), eq(true))) + .thenReturn(true); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); @@ -210,16 +228,14 @@ public void testGetFailureEntityDoesNotExist() throws Exception { 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); SetDomainResolver resolver = new SetDomainResolver(mockClient, mockService); // Execute resolver @@ -230,18 +246,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetDomainResolver resolver = new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); + SetDomainResolver resolver = + new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -252,4 +267,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No 
newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index decda39943dde3..1c61963703a2ab 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -1,6 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.domain; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -26,14 +32,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UnsetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; @Test @@ -42,19 +44,23 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new 
HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -66,46 +72,49 @@ public void testGetSuccessNoExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - 
Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + verifyIngestProposal(mockClient, 1, proposal); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new 
EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -117,18 +126,14 @@ public void testGetSuccessExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); + + verifyIngestProposal(mockClient, 1, proposal); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -137,19 +142,23 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - 
.setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); @@ -160,16 +169,14 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, mockService); // Execute resolver @@ -179,18 +186,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); + UnsetDomainResolver resolver = + new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -200,4 +206,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index f1d44fcb472556..c45e620a46a73d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.embed; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static 
org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -15,7 +19,7 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -25,31 +29,28 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateEmbedResolverTest { private static final String TEST_ENTITY_URN = "urn:li:dashboard:(looker,1)"; private static final String TEST_RENDER_URL = "https://www.google.com"; - private static final UpdateEmbedInput TEST_EMBED_INPUT = new UpdateEmbedInput( - TEST_ENTITY_URN, - TEST_RENDER_URL - ); + private static final UpdateEmbedInput TEST_EMBED_INPUT = + new UpdateEmbedInput(TEST_ENTITY_URN, TEST_RENDER_URL); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); @Test public void testGetSuccessNoExistingEmbed() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(EMBED_ASPECT_NAME), - Mockito.eq(0L))).thenReturn(null); + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(EMBED_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -62,14 +63,14 @@ public void testGetSuccessNoExistingEmbed() throws Exception { resolver.get(mockEnv).get(); final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - EMBED_ASPECT_NAME, newEmbed); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); - verifySingleIngestProposal(mockService, 1, proposal);; + verifySingleIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -77,14 +78,18 @@ public void testGetSuccessExistingEmbed() throws Exception { Embed originalEmbed = new Embed().setRenderUrl("https://otherurl.com"); // Create resolver - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(EMBED_ASPECT_NAME), - Mockito.eq(0L))).thenReturn(originalEmbed); + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(EMBED_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalEmbed); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); 
UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -97,14 +102,14 @@ public void testGetSuccessExistingEmbed() throws Exception { resolver.get(mockEnv).get(); final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - EMBED_ASPECT_NAME, newEmbed); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); verifySingleIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -112,19 +117,23 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DASHBOARD_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); - - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + 
Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DASHBOARD_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); @@ -136,17 +145,15 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - );; + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); + ; } @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); // Execute resolver @@ -156,20 +163,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); - 
Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + EntityService mockService = getMockEntityService(); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any()); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); // Execute resolver @@ -180,4 +184,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java index cde2739b2bcc65..a786edba5d540a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java @@ -1,14 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Collection; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EntityExistsResolverTest { private static final String ENTITY_URN_STRING = "urn:li:corpuser:test"; @@ -21,6 +23,10 @@ public void setupTest() { _entityService = mock(EntityService.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + QueryContext queryContext = mock(QueryContext.class); + 
when(queryContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + when(_dataFetchingEnvironment.getContext()).thenReturn(queryContext); + _resolver = new EntityExistsResolver(_entityService); } @@ -33,8 +39,9 @@ public void testFailsNullEntity() { @Test public void testPasses() throws Exception { - when(_dataFetchingEnvironment.getArgument("urn")).thenReturn(ENTITY_URN_STRING); - when(_entityService.exists(any())).thenReturn(true); + when(_dataFetchingEnvironment.getArgument(eq("urn"))).thenReturn(ENTITY_URN_STRING); + when(_entityService.exists(any(OperationContext.class), any(Collection.class))) + .thenAnswer(args -> args.getArgument(1)); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java index 913ea4602faf00..04b9a1a3dcd002 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Chart; @@ -14,14 +18,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static 
com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class EntityPrivilegesResolverTest { final String glossaryTermUrn = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; @@ -29,7 +29,8 @@ public class EntityPrivilegesResolverTest { final String datasetUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"; final String chartUrn = "urn:li:chart:(looker,baz1)"; final String dashboardUrn = "urn:li:dashboard:(looker,dashboards.1)"; - final String dataJobUrn = "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; + final String dataJobUrn = + "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; private DataFetchingEnvironment setUpTestWithPermissions(Entity entity) { QueryContext mockContext = getMockAllowContext(); @@ -115,11 +116,10 @@ public void testGetFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); DataFetchingEnvironment mockEnv = setUpTestWithoutPermissions(glossaryNode); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .getV2( + any(), Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), Mockito.any(), Mockito.any()); EntityPrivilegesResolver resolver = new EntityPrivilegesResolver(mockClient); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolverTest.java new file mode 100644 index 00000000000000..a7b753cb9d0148 --- /dev/null +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/BatchRemoveFormResolverTest.java @@ -0,0 +1,82 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BatchAssignFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.com.google.common.collect.ImmutableList; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class BatchRemoveFormResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final BatchAssignFormInput TEST_INPUT = + new BatchAssignFormInput(TEST_FORM_URN, ImmutableList.of(TEST_DATASET_URN)); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true); + BatchRemoveFormResolver resolver = new BatchRemoveFormResolver(mockFormService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + boolean success = resolver.get(mockEnv).get(); + + assertTrue(success); + + // Validate that we called unassign on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .batchUnassignFormForEntities( + any(), + Mockito.eq(ImmutableList.of(UrnUtils.getUrn(TEST_DATASET_URN))), + 
Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + } + + @Test + public void testThrowsError() throws Exception { + FormService mockFormService = initMockFormService(false); + BatchRemoveFormResolver resolver = new BatchRemoveFormResolver(mockFormService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we called unassign on the service - but it throws an error + Mockito.verify(mockFormService, Mockito.times(1)) + .batchUnassignFormForEntities( + any(), + Mockito.eq(ImmutableList.of(UrnUtils.getUrn(TEST_DATASET_URN))), + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + } + + private FormService initMockFormService(final boolean shouldSucceed) throws Exception { + FormService service = Mockito.mock(FormService.class); + + if (!shouldSucceed) { + Mockito.doThrow(new RuntimeException()) + .when(service) + .batchUnassignFormForEntities(any(), Mockito.any(), Mockito.any()); + } + + return service; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/CreateFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/CreateFormResolverTest.java new file mode 100644 index 00000000000000..65f51830ee148e --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/CreateFormResolverTest.java @@ -0,0 +1,116 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import 
com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreateFormInput; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.FormType; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.form.FormInfo; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class CreateFormResolverTest { + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final CreateFormInput TEST_INPUT = + new CreateFormInput(null, "test name", null, FormType.VERIFICATION, new ArrayList<>(), null); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true); + EntityClient mockEntityClient = initMockEntityClient(); + CreateFormResolver resolver = new CreateFormResolver(mockEntityClient, mockFormService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Form form = resolver.get(mockEnv).get(); + + assertEquals(form.getUrn(), TEST_FORM_URN); + + // Validate that we called create on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .createForm(any(), any(FormInfo.class), Mockito.eq(null)); + } + + @Test + public void testGetUnauthorized() throws Exception { + FormService mockFormService = initMockFormService(true); + EntityClient mockEntityClient = initMockEntityClient(); + 
CreateFormResolver resolver = new CreateFormResolver(mockEntityClient, mockFormService); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call create on the service + Mockito.verify(mockFormService, Mockito.times(0)) + .createForm(any(), any(FormInfo.class), Mockito.eq(null)); + } + + @Test + public void testGetFailure() throws Exception { + FormService mockFormService = initMockFormService(false); + EntityClient mockEntityClient = initMockEntityClient(); + CreateFormResolver resolver = new CreateFormResolver(mockEntityClient, mockFormService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we called create on the service + Mockito.verify(mockFormService, Mockito.times(1)) + .createForm(any(), any(FormInfo.class), Mockito.eq(null)); + } + + private FormService initMockFormService(final boolean shouldSucceed) throws Exception { + FormService service = Mockito.mock(FormService.class); + + if (shouldSucceed) { + Mockito.when(service.createForm(any(), Mockito.any(), Mockito.any())) + .thenReturn(UrnUtils.getUrn("urn:li:form:1")); + } else { + Mockito.when(service.createForm(any(), Mockito.any(), Mockito.any())) + .thenThrow(new RuntimeException()); + } + + return service; + } + + private EntityClient initMockEntityClient() throws Exception { + EntityClient client = 
Mockito.mock(EntityClient.class); + EntityResponse response = new EntityResponse(); + response.setEntityName(Constants.FORM_ENTITY_NAME); + response.setUrn(UrnUtils.getUrn(TEST_FORM_URN)); + response.setAspects(new EnvelopedAspectMap()); + Mockito.when( + client.getV2(any(), Mockito.eq(Constants.FORM_ENTITY_NAME), any(), Mockito.eq(null))) + .thenReturn(response); + + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/DeleteFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/DeleteFormResolverTest.java new file mode 100644 index 00000000000000..ded79ed9a00184 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/DeleteFormResolverTest.java @@ -0,0 +1,90 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DeleteFormInput; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class DeleteFormResolverTest { + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final DeleteFormInput TEST_INPUT = new DeleteFormInput(TEST_FORM_URN); + + @Test + public void testGetSuccess() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + DeleteFormResolver resolver = new DeleteFormResolver(mockEntityClient); + + // Execute 
resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Boolean success = resolver.get(mockEnv).get(); + assertTrue(success); + + // Validate that we called delete + Mockito.verify(mockEntityClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + } + + @Test + public void testGetUnauthorized() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + DeleteFormResolver resolver = new DeleteFormResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call delete and delete references + Mockito.verify(mockEntityClient, Mockito.times(0)) + .deleteEntity(any(), Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + Mockito.verify(mockEntityClient, Mockito.times(0)) + .deleteEntityReferences(any(), Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + } + + @Test + public void testGetFailure() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false); + DeleteFormResolver resolver = new DeleteFormResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); + + // Validate that deleteEntity was called, but since it failed, delete references was not called + Mockito.verify(mockEntityClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + Mockito.verify(mockEntityClient, Mockito.times(0)) + .deleteEntityReferences(any(), Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN))); + } + + private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + if (!shouldSucceed) { + Mockito.doThrow(new RemoteInvocationException()).when(client).deleteEntity(any(), any()); + } + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java new file mode 100644 index 00000000000000..d227f7f919038b --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/IsFormAssignedToMeResolverTest.java @@ -0,0 +1,165 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import com.datahub.authentication.group.GroupService; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CorpGroup; +import com.linkedin.datahub.graphql.generated.CorpUser; +import com.linkedin.datahub.graphql.generated.FormActorAssignment; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IsFormAssignedToMeResolverTest { + 
+ private static final Urn TEST_USER_1 = UrnUtils.getUrn("urn:li:corpuser:test-1"); + private static final Urn TEST_USER_2 = UrnUtils.getUrn("urn:li:corpuser:test-2"); + private static final Urn TEST_GROUP_1 = UrnUtils.getUrn("urn:li:corpGroup:test-1"); + private static final Urn TEST_GROUP_2 = UrnUtils.getUrn("urn:li:corpGroup:test-2"); + + @Test + public void testGetSuccessUserMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, Collections.emptyList()); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. 
+ } + + @Test + public void testGetSuccessGroupMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessBothMatch() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); // is in group + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_1.toString()); + + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_1.toString()); // is matching user + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertTrue(resolver.get(mockEnv).get()); + 
Mockito.verifyNoMoreInteractions(groupService); // Should not perform group lookup. + } + + @Test + public void testGetSuccessNoMatchNullAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchEmptyAssignment() throws Exception { + GroupService groupService = + mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1, TEST_GROUP_2)); + + FormActorAssignment actors = new FormActorAssignment(); + actors.setUsers(Collections.emptyList()); + actors.setGroups(Collections.emptyList()); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + @Test + public void testGetSuccessNoMatchNoAssignmentMatch() throws Exception { + GroupService groupService = mockGroupService(TEST_USER_1, ImmutableList.of(TEST_GROUP_1)); + + CorpGroup assignedGroup = new CorpGroup(); + assignedGroup.setUrn(TEST_GROUP_2.toString()); // Does not match. 
+ + CorpUser assignedUser = new CorpUser(); + assignedUser.setUrn(TEST_USER_2.toString()); // does not match + + FormActorAssignment actors = new FormActorAssignment(); + actors.setGroups(new ArrayList<>(ImmutableList.of(assignedGroup))); + actors.setUsers(new ArrayList<>(ImmutableList.of(assignedUser))); + + QueryContext mockContext = getMockAllowContext(TEST_USER_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(actors); + + IsFormAssignedToMeResolver resolver = new IsFormAssignedToMeResolver(groupService); + assertFalse(resolver.get(mockEnv).get()); + } + + private GroupService mockGroupService(final Urn userUrn, final List groupUrns) + throws Exception { + GroupService mockService = Mockito.mock(GroupService.class); + Mockito.when(mockService.getGroupsForUser(any(), Mockito.eq(userUrn))).thenReturn(groupUrns); + return mockService; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/UpdateFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/UpdateFormResolverTest.java new file mode 100644 index 00000000000000..6a4b99742f7fd9 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/UpdateFormResolverTest.java @@ -0,0 +1,105 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Form; +import com.linkedin.datahub.graphql.generated.UpdateFormInput; +import com.linkedin.entity.EntityResponse; +import 
com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UpdateFormResolverTest { + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final UpdateFormInput TEST_INPUT = + new UpdateFormInput(TEST_FORM_URN, "new name", null, null, null, null, null); + + @Test + public void testGetSuccess() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + UpdateFormResolver resolver = new UpdateFormResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Form form = resolver.get(mockEnv).get(); + + assertEquals(form.getUrn(), TEST_FORM_URN); + + // Validate that we called ingest + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetUnauthorized() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + UpdateFormResolver resolver = new UpdateFormResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that 
we did NOT call ingest + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetFailure() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false); + UpdateFormResolver resolver = new UpdateFormResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that ingest was called, but that caused a failure + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + EntityResponse response = new EntityResponse(); + response.setEntityName(Constants.FORM_ENTITY_NAME); + response.setUrn(UrnUtils.getUrn(TEST_FORM_URN)); + response.setAspects(new EnvelopedAspectMap()); + if (shouldSucceed) { + Mockito.when( + client.getV2(any(), Mockito.eq(Constants.FORM_ENTITY_NAME), any(), Mockito.eq(null))) + .thenReturn(response); + } else { + Mockito.when( + client.getV2(any(), Mockito.eq(Constants.FORM_ENTITY_NAME), any(), Mockito.eq(null))) + .thenThrow(new RemoteInvocationException()); + } + + Mockito.when(client.exists(any(), Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)))).thenReturn(true); + + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java new file mode 100644 index 
00000000000000..8d0fe4f8395d74 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/form/VerifyFormResolverTest.java @@ -0,0 +1,113 @@ +package com.linkedin.datahub.graphql.resolvers.form; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.group.GroupService; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.VerifyFormInput; +import com.linkedin.metadata.service.FormService; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class VerifyFormResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String TEST_FORM_URN = "urn:li:form:1"; + + private static final VerifyFormInput TEST_INPUT = + new VerifyFormInput(TEST_FORM_URN, TEST_DATASET_URN); + + @Test + public void testGetSuccess() throws Exception { + FormService mockFormService = initMockFormService(true, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + boolean success = resolver.get(mockEnv).get(); + + assertTrue(success); + + // Validate that we called verify on the service + Mockito.verify(mockFormService, Mockito.times(1)) + 
.verifyFormForEntity( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_FORM_URN)), + Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN))); + } + + @Test + public void testGetUnauthorized() throws Exception { + FormService mockFormService = initMockFormService(false, true); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do not call verify on the service + Mockito.verify(mockFormService, Mockito.times(0)) + .verifyFormForEntity(any(), Mockito.any(), Mockito.any()); + } + + @Test + public void testThrowErrorOnVerification() throws Exception { + FormService mockFormService = initMockFormService(true, false); + GroupService mockGroupService = initMockGroupService(); + VerifyFormResolver resolver = new VerifyFormResolver(mockFormService, mockGroupService); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + // Validate that we do call verifyFormForEntity but an error is thrown + Mockito.verify(mockFormService, Mockito.times(1)) + .verifyFormForEntity(any(), Mockito.any(), Mockito.any()); + } + + private FormService initMockFormService( + final boolean isFormAssignedToUser, final boolean shouldVerify) throws Exception { + FormService service = 
Mockito.mock(FormService.class); + Mockito.when( + service.isFormAssignedToUser( + any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any())) + .thenReturn(isFormAssignedToUser); + + if (shouldVerify) { + Mockito.when(service.verifyFormForEntity(any(), Mockito.any(), Mockito.any())) + .thenReturn(true); + } else { + Mockito.when(service.verifyFormForEntity(any(), Mockito.any(), Mockito.any())) + .thenThrow(new RuntimeException()); + } + + return service; + } + + private GroupService initMockGroupService() throws Exception { + GroupService service = Mockito.mock(GroupService.class); + Mockito.when(service.getGroupsForUser(any(), Mockito.any())).thenReturn(new ArrayList<>()); + + return service; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 26c13186c4a817..a8920fa9e5b3c2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -1,24 +1,25 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.ExecutionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.*; - - public class AddRelatedTermsResolverTest { private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; @@ -27,93 +28,101 @@ public class AddRelatedTermsResolverTest { private static final String DATASET_URN = "urn:li:dataset:(test,test,test)"; private EntityService setUpService() { - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + eq(0L))) .thenReturn(null); return mockService; } @Test public void testGetSuccessIsRelatedNonExistent() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); - 
AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetSuccessHasRelatedNonExistent() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), 
eq(true)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetFailAddSelfAsRelatedTerm() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_ENTITY_URN - ), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -122,18 +131,20 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { @Test public void testGetFailAddNonTermAsRelatedTerm() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new 
AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - DATASET_URN - ), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -142,19 +153,22 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { @Test public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -163,19 +177,22 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { @Test public void testGetFailAddToNonExistentUrn() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> 
resolver.get(mockEnv).get()); @@ -184,19 +201,22 @@ public void testGetFailAddToNonExistentUrn() throws Exception { @Test public void testGetFailAddToNonTerm() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(DATASET_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(DATASET_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(DATASET_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); @@ -205,25 +225,29 @@ public void testGetFailAddToNonTerm() throws Exception { @Test public void testFailNoPermissions() throws Exception { - EntityService mockService = setUpService(); + EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 3b47514d87181b..0f2fa7f88cd9b6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.datahub.graphql.TestUtils.verifyIngestProposal; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,39 +12,27 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; -import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static com.linkedin.metadata.Constants.*; - - public class CreateGlossaryNodeResolverTest { - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput 
TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -47,8 +40,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -63,58 +56,49 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_NODE_ENTITY_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, props); } @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", 
parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } @Test public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } @Test public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - 
Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 2dbe637d160572..8a51d8ea100092 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -1,61 +1,56 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.datahub.graphql.TestUtils.verifyIngestProposal; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.search.SearchEntity; import 
com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static com.linkedin.metadata.Constants.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class CreateGlossaryTermResolverTest { private static final String EXISTING_TERM_URN = "urn:li:glossaryTerm:testing12345"; - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", 
"test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -63,8 +58,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -80,59 +75,50 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_TERM_ENTITY_NAME, GLOSSARY_TERM_INFO_ASPECT_NAME, props); } @Test public void testGetSuccess() throws Exception { EntityClient mockClient = initMockClient(); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } @Test public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = initMockClient(); - EntityService mockService = 
getMockEntityService(); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } @Test public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = initMockClient(); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } @Test @@ -140,73 +126,70 @@ public void testGetFailureExistingTermSameName() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new 
SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))) - )); + mockClient.filter( + any(), + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))))); Map result = new HashMap<>(); EnvelopedAspectMap map = new EnvelopedAspectMap(); GlossaryTermInfo termInfo = new GlossaryTermInfo().setName("Duplicated Name"); - map.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); + map.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); result.put(UrnUtils.getUrn(EXISTING_TERM_URN), new EntityResponse().setAspects(map)); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(result); - - EntityService mockService = getMockEntityService(); + mockClient.batchGetV2( + any(), + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)))) + .thenReturn(result); + + EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - CreateGlossaryEntityInput input = new CreateGlossaryEntityInput( - "test-id", - "Duplicated Name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); + CreateGlossaryEntityInput input = + new CreateGlossaryEntityInput( + "test-id", + "Duplicated Name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); setupTest(mockEnv, input, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } private EntityClient initMockClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + mockClient.filter( + any(), + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(new HashMap<>()); + mockClient.batchGetV2( + any(), + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)))) + .thenReturn(new HashMap<>()); return mockClient; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index 94f0d0b7a11434..9adc5d5e516e52 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -1,58 +1,61 @@ package com.linkedin.datahub.graphql.resolvers.glossary; -import com.datahub.authentication.Authentication; +import static 
com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class DeleteGlossaryEntityResolverTest { - private static final String TEST_TERM_URN = "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; + private static final String TEST_TERM_URN = + "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_URN)), eq(true))) + .thenReturn(true); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_TERM_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - DeleteGlossaryEntityResolver 
resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_TERM_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(Urn.createFromString(TEST_TERM_URN))); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(any(), Mockito.any()); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_URN)), eq(true))) + .thenReturn(true); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index 677516e9404e8a..60787fc47c88a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -19,44 +23,43 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryNodesResolverTest { - final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput( - 0, 100 - ); + final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryNodeUrn1 = "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451"; final String glossaryNodeUrn2 = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - 
Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryNodesResolver resolver = new GetRootGlossaryNodesResolver(mockClient); GetRootGlossaryNodesResult result = resolver.get(mockEnv).get(); @@ -64,24 +67,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); - assertEquals(result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); + assertEquals( + result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); + assertEquals( + result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new 
ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 5aba32108b7db0..51760ff9d37f25 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -19,42 +23,43 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryTermsResolverTest { final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryTermUrn1 = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; final String glossaryTermUrn2 = "urn:li:glossaryTerm:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); + 
Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryTermsResolver resolver = new GetRootGlossaryTermsResolver(mockClient); GetRootGlossaryTermsResult result = resolver.get(mockEnv).get(); @@ -62,23 +67,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); - assertEquals(result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); + assertEquals( + result.getTerms().get(0).getUrn(), 
Urn.createFromString(glossaryTermUrn1).toString()); + assertEquals( + result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java index 8bfc32e1999ae2..448c3420625929 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java @@ -1,33 +1,33 @@ package com.linkedin.datahub.graphql.resolvers.glossary; -import com.google.common.collect.ImmutableSet; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableSet; import 
com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.Aspect; -import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.Constants; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.Optional; -import java.util.Map; -import java.util.HashMap; - -import static org.testng.Assert.*; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; - public class GlossaryUtilsTest { private final String userUrn = "urn:li:corpuser:authorized"; @@ -44,67 +44,87 @@ private void setUpTests() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(userUrn); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node2") - ); - GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node3") - ); - + GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node2")); + GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node3")); + GlossaryNodeInfo parentNode3 = new GlossaryNodeInfo(); - + Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new 
EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode1.getParentNode()).data() - ))); - + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode1.getParentNode()) + .data()))); + Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 2") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode3Aspects = new HashMap<>(); - parentNode3Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 3").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn1), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn2), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn3), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new 
EnvelopedAspectMap(parentNode3Aspects))); - - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + parentNode3Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 3").data()))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn1), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn2), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn3), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); + + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, 
resourceSpec1); } - private void mockAuthRequest(String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { - final AuthorizationRequest authorizationRequest = new AuthorizationRequest( - userUrn, - privilege, - resourceSpec != null ? Optional.of(resourceSpec) : Optional.empty() - ); + private void mockAuthRequest( + String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { + final AuthorizationRequest authorizationRequest = + new AuthorizationRequest( + userUrn, + privilege, + resourceSpec != null ? Optional.of(resourceSpec) : Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(allowOrDeny); Mockito.when(mockAuthorizer.authorize(Mockito.eq(authorizationRequest))).thenReturn(result); @@ -150,7 +170,8 @@ public void testCanManageChildrenEntitiesAuthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn, mockClient)); @@ -162,7 +183,8 @@ public void testCanManageChildrenEntitiesUnauthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); 
mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); @@ -175,13 +197,16 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorized() throws Exceptio // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -193,13 +218,16 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorized() throws Except // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new 
EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -211,10 +239,12 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorizedLevel2() throws Ex // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -226,10 +256,12 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorizedLevel2() throws // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + 
final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn2, mockClient)); @@ -241,7 +273,8 @@ public void testCanManageChildrenRecursivelyEntitiesNoLevel2() throws Exception // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn3, mockClient)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 06dff7611fac86..365bdf87a76500 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -1,5 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static 
com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -16,17 +22,12 @@ import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentNodesResolverTest { @Test @@ -34,6 +35,8 @@ public void testGetSuccessForTerm() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -43,76 +46,94 @@ public void testGetSuccessForTerm() throws Exception { termEntity.setType(EntityType.GLOSSARY_TERM); Mockito.when(mockEnv.getSource()).thenReturn(termEntity); - final GlossaryTermInfo parentNode1 = new GlossaryTermInfo().setParentNode(GlossaryNodeUrn.createFromString( - 
"urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryTermInfo parentNode1 = + new GlossaryTermInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map glossaryTermAspects = new HashMap<>(); - glossaryTermAspects.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryTermAspects.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(termUrn.getEntityType()), - Mockito.eq(termUrn), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); - - 
Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(termUrn.getEntityType()), + Mockito.eq(termUrn), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode1.getParentNode().getEntityType()), + 
Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); @@ -123,6 +144,8 @@ public void testGetSuccessForNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); 
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -132,78 +155,96 @@ public void testGetSuccessForNode() throws Exception { nodeEntity.setType(EntityType.GLOSSARY_NODE); Mockito.when(mockEnv.getSource()).thenReturn(nodeEntity); - final GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map glossaryNodeAspects = new HashMap<>(); - glossaryNodeAspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryNodeAspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + 
.setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(nodeUrn.getEntityType()), - Mockito.eq(nodeUrn), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new 
EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(nodeUrn.getEntityType()), + Mockito.eq(nodeUrn), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + any(), + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, 
Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 3906d1188cb172..85019a475865e1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,5 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; @@ -8,19 +14,15 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import 
java.util.Arrays; import java.util.concurrent.ExecutionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RemoveRelatedTermsResolverTest { @@ -34,30 +36,33 @@ public void testGetSuccessIsA() throws Exception { GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test @@ -66,50 +71,57 @@ public void testGetSuccessHasA() throws Exception { GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input 
= + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test public void testFailAspectDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -123,29 +135,32 @@ public void testFailNoPermissions() throws Exception { GlossaryTermUrn term2Urn = GlossaryTermUrn.createFromString(TEST_TERM_2_URN); final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); 
verifyNoIngestProposal(mockService); - Mockito.verify(mockService, Mockito.times(0)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(0)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index eee9cfbae8fcb2..b4a2655755a028 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -1,5 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; @@ -19,16 +25,11 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateNameResolverTest { private static final String NEW_NAME = "New Name"; @@ -40,30 +41,36 @@ public class UpdateNameResolverTest { 
private static final UpdateNameInput INPUT_FOR_DOMAIN = new UpdateNameInput(NEW_NAME, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(TERM_URN)), + eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + eq(0L))) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(NEW_NAME); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test public void testGetSuccess() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); + Mockito.when( + mockService.exists( + any(OperationContext.class), eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -76,9 +83,10 @@ 
public void testGetSuccess() throws Exception { @Test public void testGetSuccessForNode() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(NODE_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_NODE); @@ -88,16 +96,19 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(Urn.createFromString(NODE_URN)), + eq(GLOSSARY_NODE_INFO_ASPECT_NAME), + eq(0L))) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); @@ -106,9 +117,10 @@ public void testGetSuccessForNode() throws Exception { @Test public void testGetSuccessForDomain() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.when(mockService.exists(Urn.createFromString(DOMAIN_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(DOMAIN_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_FOR_DOMAIN); @@ -118,25 +130,30 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(OperationContext.class), + eq(Urn.createFromString(DOMAIN_URN)), + eq(DOMAIN_PROPERTIES_ASPECT_NAME), + eq(0L))) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); UpdateNameResolver 
resolver = new UpdateNameResolver(mockService, mockClient); @@ -146,9 +163,12 @@ public void testGetSuccessForDomain() throws Exception { @Test public void testGetFailureEntityDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); + Mockito.when( + mockService.exists( + any(OperationContext.class), eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index a78c28890fecf3..25a900d4d90696 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -15,53 +22,61 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import 
io.datahubproject.metadata.context.OperationContext; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateParentNodeResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; - private static final String PARENT_NODE_URN = "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; + private static final String PARENT_NODE_URN = + "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; private static final String TERM_URN = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; private static final String NODE_URN = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - private static final UpdateParentNodeInput INPUT = new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); - private static final UpdateParentNodeInput INPUT_WITH_NODE = new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); - private static final UpdateParentNodeInput INVALID_INPUT = new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT = + new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT_WITH_NODE = + new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); + private static final UpdateParentNodeInput INVALID_INPUT = + new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); 
Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(TERM_URN)), + eq(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + eq(0L))) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test public void testGetSuccess() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists( + any(OperationContext.class), + eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), + eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -74,10 +89,14 @@ public void testGetSuccess() throws Exception { @Test public void testGetSuccessForNode() throws Exception { - EntityService mockService = getMockEntityService(); + 
EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(NODE_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists( + any(), eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -87,17 +106,20 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + any(), + eq(Urn.createFromString(NODE_URN)), + eq(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + eq(0L))) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService, mockClient); @@ -107,10 +129,14 @@ public void testGetSuccessForNode() throws Exception { @Test public void testGetFailureEntityDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = 
getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(false); + Mockito.when( + mockService.exists( + any(), eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -123,10 +149,14 @@ public void testGetFailureEntityDoesNotExist() throws Exception { @Test public void testGetFailureNodeDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists( + any(), eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -139,10 +169,14 @@ public void testGetFailureNodeDoesNotExist() throws Exception { @Test public void testGetFailureParentIsNotNode() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - 
Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TERM_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists( + any(), eq(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)), eq(true))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java index a20c84d11ba9fd..005fda190be74b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AddGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; @@ -62,8 +61,8 @@ public void testFailsExternalGroup() { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); 
when(mockContext.getActorUrn()).thenReturn(USER_URN_STRING); - when(_groupService.groupExists(any())).thenReturn(true); - when(_groupService.getGroupOrigin(eq(_groupUrn))).thenReturn(groupOrigin); + when(_groupService.groupExists(any(), any())).thenReturn(true); + when(_groupService.getGroupOrigin(any(), eq(_groupUrn))).thenReturn(groupOrigin); assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); } @@ -77,8 +76,8 @@ public void testPassesNativeGroup() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(mockContext.getActorUrn()).thenReturn(USER_URN_STRING); - when(_groupService.groupExists(any())).thenReturn(true); - when(_groupService.getGroupOrigin(eq(_groupUrn))).thenReturn(groupOrigin); + when(_groupService.groupExists(any(), any())).thenReturn(true); + when(_groupService.getGroupOrigin(any(), eq(_groupUrn))).thenReturn(groupOrigin); _resolver.get(_dataFetchingEnvironment).join(); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java index 876de633bd6562..8c4d06cf58c9f2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static 
com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateGroupResolverTest { private static final String GROUP_ID = "id"; @@ -47,7 +46,7 @@ public void testPasses() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(_input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_groupService.groupExists(any())).thenReturn(false); + when(_groupService.groupExists(any(), any())).thenReturn(false); _resolver.get(_dataFetchingEnvironment).join(); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java index 73b0be96fce176..6596ae92076c5d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class RemoveGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; @@ -62,8 +61,8 @@ public void 
testFailsExternalGroup() { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(mockContext.getActorUrn()).thenReturn(USER_URN_STRING); - when(_groupService.groupExists(any())).thenReturn(true); - when(_groupService.getGroupOrigin(eq(_groupUrn))).thenReturn(groupOrigin); + when(_groupService.groupExists(any(), any())).thenReturn(true); + when(_groupService.getGroupOrigin(any(), eq(_groupUrn))).thenReturn(groupOrigin); assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); } @@ -77,8 +76,8 @@ public void testPassesNativeGroup() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(mockContext.getActorUrn()).thenReturn(USER_URN_STRING); - when(_groupService.groupExists(any())).thenReturn(true); - when(_groupService.getGroupOrigin(eq(_groupUrn))).thenReturn(groupOrigin); + when(_groupService.groupExists(any(), any())).thenReturn(true); + when(_groupService.getGroupOrigin(any(), eq(_groupUrn))).thenReturn(groupOrigin); _resolver.get(_dataFetchingEnvironment).join(); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java new file mode 100644 index 00000000000000..ba4facd7c9af2b --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/health/EntityHealthResolverTest.java @@ -0,0 +1,207 @@ +package com.linkedin.datahub.graphql.resolvers.health; + +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.linkedin.common.EntityRelationship; +import com.linkedin.common.EntityRelationshipArray; +import 
com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringArray; +import com.linkedin.data.template.StringArrayArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.Health; +import com.linkedin.datahub.graphql.generated.HealthStatus; +import com.linkedin.datahub.graphql.resolvers.dataset.DatasetHealthResolver; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.graph.GraphClient; +import com.linkedin.metadata.query.filter.RelationshipDirection; +import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.timeseries.GenericTable; +import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +// TODO: Update this test once assertions summary has been added. +public class EntityHealthResolverTest { + + private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; + private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; + private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:test-guid-2"; + + @Test + public void testGetSuccessHealthy() throws Exception { + GraphClient graphClient = Mockito.mock(GraphClient.class); + TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); + + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"))))); + + 
Mockito.when( + mockAspectService.getAggregatedStats( + any(), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")))))); + + DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Dataset parentDataset = new Dataset(); + parentDataset.setUrn(TEST_DATASET_URN); + Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); + + List result = resolver.get(mockEnv).get(); + assertNotNull(result); + assertEquals(result.size(), 1); + assertEquals(result.get(0).getStatus(), HealthStatus.PASS); + } + + @Test + public void testGetSuccessNullHealth() throws Exception { + GraphClient graphClient = Mockito.mock(GraphClient.class); + TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); + + // 0 associated assertions, meaning we don't report any health. 
+ Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); + + DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Dataset parentDataset = new Dataset(); + parentDataset.setUrn(TEST_DATASET_URN); + Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); + + List result = resolver.get(mockEnv).get(); + assertEquals(result.size(), 0); + + Mockito.verify(mockAspectService, Mockito.times(0)) + .getAggregatedStats( + any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); + } + + @Test + public void testGetSuccessUnhealthy() throws Exception { + GraphClient graphClient = Mockito.mock(GraphClient.class); + TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); + + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(2) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"), 
+ new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + any(), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray(ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")), + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN_2, "FAILURE", "0")))))); + + DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn("urn:li:corpuser:test"); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Dataset parentDataset = new Dataset(); + parentDataset.setUrn(TEST_DATASET_URN); + Mockito.when(mockEnv.getSource()).thenReturn(parentDataset); + + List result = resolver.get(mockEnv).get(); + assertEquals(result.size(), 1); + assertEquals(result.get(0).getStatus(), HealthStatus.FAIL); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java new file mode 100644 index 00000000000000..4750143b8add8b --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolverTest.java @@ -0,0 +1,170 @@ 
+package com.linkedin.datahub.graphql.resolvers.incident; + +import static com.linkedin.datahub.graphql.resolvers.incident.EntityIncidentsResolver.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.EntityIncidentsResult; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.utils.QueryUtils; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + 
+public class EntityIncidentsResolverTest { + @Test + public void testGetSuccess() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + + Urn assertionUrn = Urn.createFromString("urn:li:assertion:test"); + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); + Urn incidentUrn = Urn.createFromString("urn:li:incident:test-guid"); + + Map incidentAspects = new HashMap<>(); + incidentAspects.put( + Constants.INCIDENT_KEY_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new IncidentKey().setId("test-guid").data()))); + + IncidentInfo expectedInfo = + new IncidentInfo() + .setType(IncidentType.OPERATIONAL) + .setCustomType("Custom Type") + .setDescription("Description") + .setPriority(5) + .setTitle("Title") + .setEntities(new UrnArray(ImmutableList.of(datasetUrn))) + .setSource( + new IncidentSource().setType(IncidentSourceType.MANUAL).setSourceUrn(assertionUrn)) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage("Message") + .setLastUpdated(new AuditStamp().setTime(1L).setActor(userUrn))) + .setCreated(new AuditStamp().setTime(0L).setActor(userUrn)); + + incidentAspects.put( + Constants.INCIDENT_INFO_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(expectedInfo.data()))); + + final Map criterionMap = new HashMap<>(); + criterionMap.put(INCIDENT_ENTITIES_SEARCH_INDEX_FIELD_NAME, datasetUrn.toString()); + Filter expectedFilter = QueryUtils.newFilter(criterionMap); + + SortCriterion expectedSort = new SortCriterion(); + expectedSort.setField(CREATED_TIME_SEARCH_INDEX_FIELD_NAME); + expectedSort.setOrder(SortOrder.DESCENDING); + + Mockito.when( + mockClient.filter( + Mockito.any(), + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(expectedFilter), + Mockito.eq(Collections.singletonList(expectedSort)), + Mockito.eq(0), + Mockito.eq(10))) + .thenReturn( + new 
SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(incidentUrn))))); + + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(incidentUrn)), + Mockito.eq(null))) + .thenReturn( + ImmutableMap.of( + incidentUrn, + new EntityResponse() + .setEntityName(Constants.INCIDENT_ENTITY_NAME) + .setUrn(incidentUrn) + .setAspects(new EnvelopedAspectMap(incidentAspects)))); + + EntityIncidentsResolver resolver = new EntityIncidentsResolver(mockClient); + + // Execute resolver + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("start"), Mockito.eq(0))).thenReturn(0); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("count"), Mockito.eq(20))).thenReturn(10); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Dataset parentEntity = new Dataset(); + parentEntity.setUrn(datasetUrn.toString()); + Mockito.when(mockEnv.getSource()).thenReturn(parentEntity); + + EntityIncidentsResult result = resolver.get(mockEnv).get(); + + // Assert that GraphQL Incident run event matches expectations + assertEquals(result.getStart(), 0); + assertEquals(result.getCount(), 1); + assertEquals(result.getTotal(), 1); + + com.linkedin.datahub.graphql.generated.Incident incident = + resolver.get(mockEnv).get().getIncidents().get(0); + assertEquals(incident.getUrn(), incidentUrn.toString()); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getIncidentType().toString(), expectedInfo.getType().toString()); + assertEquals(incident.getTitle(), 
expectedInfo.getTitle()); + assertEquals(incident.getDescription(), expectedInfo.getDescription()); + assertEquals(incident.getCustomType(), expectedInfo.getCustomType()); + assertEquals( + incident.getStatus().getState().toString(), expectedInfo.getStatus().getState().toString()); + assertEquals(incident.getStatus().getMessage(), expectedInfo.getStatus().getMessage()); + assertEquals( + incident.getStatus().getLastUpdated().getTime(), + expectedInfo.getStatus().getLastUpdated().getTime()); + assertEquals( + incident.getStatus().getLastUpdated().getActor(), + expectedInfo.getStatus().getLastUpdated().getActor().toString()); + assertEquals( + incident.getSource().getType().toString(), expectedInfo.getSource().getType().toString()); + assertEquals( + incident.getSource().getSource().getUrn(), + expectedInfo.getSource().getSourceUrn().toString()); + assertEquals(incident.getCreated().getActor(), expectedInfo.getCreated().getActor().toString()); + assertEquals(incident.getCreated().getTime(), expectedInfo.getCreated().getTime()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index dae0758f6a2f65..e0555f5886b8bb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -19,17 +22,17 @@ import com.linkedin.ingestion.DataHubIngestionSourceSchedule; import com.linkedin.metadata.Constants; import 
com.linkedin.secret.DataHubSecretValue; +import io.datahubproject.metadata.context.OperationContext; import org.mockito.Mockito; -import static org.testng.Assert.*; - - public class IngestTestUtils { - public static final Urn TEST_INGESTION_SOURCE_URN = Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); - public static final Urn TEST_SECRET_URN = Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); - public static final Urn TEST_EXECUTION_REQUEST_URN = Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); - + public static final Urn TEST_INGESTION_SOURCE_URN = + Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); + public static final Urn TEST_SECRET_URN = + Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); + public static final Urn TEST_EXECUTION_REQUEST_URN = + Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); public static QueryContext getMockAllowContext() { QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -42,6 +45,7 @@ public static QueryContext getMockAllowContext() { Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); return mockContext; } @@ -63,8 +67,13 @@ public static DataHubIngestionSourceInfo getTestIngestionSourceInfo() { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setName("My Test Source"); info.setType("mysql"); - info.setSchedule(new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); - info.setConfig(new DataHubIngestionSourceConfig().setVersion("0.8.18").setRecipe("{}").setExecutorId("executor id")); + info.setSchedule( + new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); + info.setConfig( + new DataHubIngestionSourceConfig() + 
.setVersion("0.8.18") + .setRecipe("{}") + .setExecutorId("executor id")); return info; } @@ -78,15 +87,18 @@ public static DataHubSecretValue getTestSecretValue() { public static ExecutionRequestInput getTestExecutionRequestInput() { ExecutionRequestInput input = new ExecutionRequestInput(); - input.setArgs(new StringMap( - ImmutableMap.of( - "recipe", "my-custom-recipe", - "version", "0.8.18") - )); + input.setArgs( + new StringMap( + ImmutableMap.of( + "recipe", "my-custom-recipe", + "version", "0.8.18"))); input.setTask("RUN_INGEST"); input.setExecutorId("default"); input.setRequestedAt(0L); - input.setSource(new ExecutionRequestSource().setIngestionSource(TEST_INGESTION_SOURCE_URN).setType("SCHEDULED_INGESTION")); + input.setSource( + new ExecutionRequestSource() + .setIngestionSource(TEST_INGESTION_SOURCE_URN) + .setType("SCHEDULED_INGESTION")); return input; } @@ -99,7 +111,8 @@ public static ExecutionRequestResult getTestExecutionRequestResult() { return result; } - public static void verifyTestIngestionSourceGraphQL(IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { + public static void verifyTestIngestionSourceGraphQL( + IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { assertEquals(ingestionSource.getUrn(), TEST_INGESTION_SOURCE_URN.toString()); assertEquals(ingestionSource.getName(), info.getName()); assertEquals(ingestionSource.getType(), info.getType()); @@ -134,5 +147,5 @@ public static void verifyTestExecutionRequest( assertEquals(executionRequest.getResult().getStartTimeMs(), result.getStartTimeMs()); } - private IngestTestUtils() { } + private IngestTestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 12045b93614691..f3e27d91f39df0 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -1,13 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; +import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.metadata.Constants; import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; public class IngestionAuthUtilsTest { @@ -16,11 +19,11 @@ public void testCanManageIngestionAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:authorized", + "MANAGE_INGESTION", + Optional.of(new EntitySpec(Constants.INGESTION_SOURCE_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -37,11 +40,11 @@ public void testCanManageIngestionUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", + "MANAGE_INGESTION", + Optional.of(new 
EntitySpec(Constants.INGESTION_SOURCE_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -58,11 +61,11 @@ public void testCanManageSecretsAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:authorized", + "MANAGE_SECRETS", + Optional.of(new EntitySpec(Constants.SECRETS_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -79,11 +82,11 @@ public void testCanManageSecretsUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", + "MANAGE_SECRETS", + Optional.of(new EntitySpec(Constants.SECRETS_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java index e7226c6e4db080..8fa66693a58af0 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -18,35 +22,36 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CancelIngestionExecutionRequestResolverTest { - private static final CancelIngestionExecutionRequestInput TEST_INPUT = new CancelIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString(), - TEST_EXECUTION_REQUEST_URN.toString() - ); + private static final CancelIngestionExecutionRequestInput TEST_INPUT = + new CancelIngestionExecutionRequestInput( + TEST_INGESTION_SOURCE_URN.toString(), TEST_EXECUTION_REQUEST_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - 
.setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -57,18 +62,16 @@ Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( resolver.get(mockEnv).get(); // Verify ingest proposal has been called to create a Signal request. 
- Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -77,19 +80,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java index 7973e49c6efdf8..fdf84a5cfdae17 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -12,6 +15,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -19,35 +23,37 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateIngestionExecutionRequestResolverTest { - private static final CreateIngestionExecutionRequestInput TEST_INPUT = new CreateIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString() - ); + private static final 
CreateIngestionExecutionRequestInput TEST_INPUT = + new CreateIngestionExecutionRequestInput(TEST_INGESTION_SOURCE_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_INGESTION_SOURCE_URN, - new EntityResponse().setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -58,11 
+64,8 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); } @Test @@ -71,7 +74,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -80,21 +84,20 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); IngestionConfiguration 
ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -105,4 +108,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java index 75df2404419658..0a0a60331ee0ce 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java @@ -1,25 +1,23 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import 
graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestConnectionRequestResolverTest { - private static final CreateTestConnectionRequestInput TEST_INPUT = new CreateTestConnectionRequestInput( - "{}", - "0.8.44" - ); + private static final CreateTestConnectionRequestInput TEST_INPUT = + new CreateTestConnectionRequestInput("{}", "0.8.44"); @Test public void testGetSuccess() throws Exception { @@ -27,7 +25,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -37,11 +36,8 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); } @Test @@ -50,7 +46,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + 
CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -59,9 +56,6 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java index 532b9b89f3a991..7866d3de3c9324 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -20,9 +23,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetIngestionExecutionRequestResolverTest { @Test @@ -33,32 +33,48 @@ public void 
testGetSuccess() throws Exception { ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new 
AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -69,7 +85,8 @@ public void testGetSuccess() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -78,7 +95,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); } @Test @@ -87,13 +105,15 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = 
Mockito.mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) - .batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index 25f3ccbd47cd65..fe4fe00454a261 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -18,75 +22,89 @@ import com.linkedin.execution.ExecutionRequestResult; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class IngestionSourceExecutionRequestsResolverTest { @Test public void testGetSuccess() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); // Mock filter response - Mockito.when(mockClient.filter( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.any(Filter.class), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) - .thenReturn(new SearchResult() - .setFrom(0) - .setPageSize(10) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableList.of( - new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN)))) - ); + Mockito.when( + mockClient.filter( + any(), + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.any(Filter.class), + Mockito.any(List.class), + Mockito.eq(0), + Mockito.eq(10))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(10) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of( + new 
SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN))))); // Mock batch get response ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + 
.setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -99,17 +117,19 @@ public void testGetSuccess() throws Exception { assertEquals((int) executionRequests.getStart(), 0); assertEquals((int) executionRequests.getCount(), 10); assertEquals((int) executionRequests.getTotal(), 1); - verifyTestExecutionRequest(executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); + verifyTestExecutionRequest( + executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); } @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + EntityClient mockClient = mock(EntityClient.class); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver - DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); + DataFetchingEnvironment mockEnv = mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); @@ -119,32 +139,24 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getSource()).thenReturn(parentSource); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).list( - Mockito.any(), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); + Mockito.verify(mockClient, Mockito.times(0)) + .list(Mockito.any(), Mockito.any(), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver - EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + EntityClient mockClient = mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + DataFetchingEnvironment mockEnv = 
mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("start"))).thenReturn(0); Mockito.when(mockEnv.getArgument(Mockito.eq("count"))).thenReturn(10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index c7a72e475f7abd..ecedd8fdf68b0c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; import com.linkedin.entity.client.EntityClient; @@ -8,10 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class RollbackIngestionResolverTest { private static final String RUN_ID = "testRunId"; @@ -46,9 +45,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .rollbackIngestion(any(), Mockito.eq(RUN_ID), 
any()); } @Test @@ -59,24 +57,20 @@ public void testRollbackIngestionMethod() throws Exception { QueryContext mockContext = getMockAllowContext(); resolver.rollbackIngestion(RUN_ID, mockContext).get(); - Mockito.verify(mockClient, Mockito.times(1)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .rollbackIngestion(any(), Mockito.eq(RUN_ID), any()); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).rollbackIngestion( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class).when(mockClient).rollbackIngestion(any(), any(), any()); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); QueryContext mockContext = getMockAllowContext(); - assertThrows(RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); + assertThrows( + RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java index 2d64d4ec56ba18..85ef304d285338 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java @@ -6,7 +6,6 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.ArgumentMatcher; - public class CreateSecretResolverMatcherTest implements ArgumentMatcher { private MetadataChangeProposal left; @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } 
private boolean secretPropertiesMatch(GenericAspect left, GenericAspect right) { - DataHubSecretValue leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DataHubSecretValue.class - ); - - DataHubSecretValue rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - DataHubSecretValue.class - ); + DataHubSecretValue leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DataHubSecretValue.class); + + DataHubSecretValue rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DataHubSecretValue.class); // Omit timestamp comparison. return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java index 18ae71661318eb..d87556827ad648 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; -import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -11,33 +14,27 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubSecretKey; -import com.linkedin.metadata.secret.SecretService; import 
com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateSecretResolverTest { - private static final CreateSecretInput TEST_INPUT = new CreateSecretInput( - "MY_SECRET", - "mysecretvalue", - "none" - ); + private static final CreateSecretInput TEST_INPUT = + new CreateSecretInput("MY_SECRET", "mysecretvalue", "none"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))).thenReturn("encryptedvalue"); + Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))) + .thenReturn("encryptedvalue"); CreateSecretResolver resolver = new CreateSecretResolver(mockClient, mockSecretService); // Execute resolver @@ -57,18 +54,21 @@ public void testGetSuccess() throws Exception { value.setValue("encryptedvalue"); value.setName(TEST_INPUT.getName()); value.setDescription(TEST_INPUT.getDescription()); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateSecretResolverMatcherTest(new MetadataChangeProposal() - .setChangeType(ChangeType.UPSERT) - .setEntityType(Constants.SECRETS_ENTITY_NAME) - .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) - .setAspect(GenericRecordUtils.serializeAspect(value)) - .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), - 
Mockito.any(Authentication.class), - Mockito.eq(false) - ); + value.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + any(), + Mockito.argThat( + new CreateSecretResolverMatcherTest( + new MetadataChangeProposal() + .setChangeType(ChangeType.UPSERT) + .setEntityType(Constants.SECRETS_ENTITY_NAME) + .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) + .setAspect(GenericRecordUtils.serializeAspect(value)) + .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), + Mockito.eq(false)); } @Test @@ -80,23 +80,20 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver @@ -108,4 +105,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () 
-> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java index 679425afbf2e7f..e4437840fc8896 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; import com.linkedin.r2.RemoteInvocationException; @@ -8,9 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteSecretResolverTest { @Test @@ -22,11 +23,11 @@ public void testGetSuccess() throws Exception { // execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_SECRET_URN.toString()); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_SECRET_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_SECRET_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, 
Mockito.times(1)).deleteEntity(any(), eq(TEST_SECRET_URN)); } @Test @@ -38,24 +39,26 @@ public void testGetUnauthorized() throws Exception { // Execute resolver QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_SECRET_URN.toString()); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_SECRET_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), eq(TEST_SECRET_URN)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(any(), eq(TEST_SECRET_URN)); DeleteSecretResolver resolver = new DeleteSecretResolver(mockClient); // Execute Resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_SECRET_URN.toString()); + Mockito.when(mockEnv.getArgument(eq("urn"))).thenReturn(TEST_SECRET_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java index 0042d34e602cc1..b49f9313996b02 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -13,23 +16,19 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.HashSet; import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetSecretValuesResolverTest { - private static final GetSecretValuesInput TEST_INPUT = new GetSecretValuesInput( - ImmutableList.of(getTestSecretValue().getName()) - ); + private static final GetSecretValuesInput TEST_INPUT = + new GetSecretValuesInput(ImmutableList.of(getTestSecretValue().getName())); @Test public void testGetSuccess() throws Exception { @@ -39,27 +38,29 @@ public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = 
Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))).thenReturn(decryptedSecretValue); + Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))) + .thenReturn(decryptedSecretValue); DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); @@ -90,22 +91,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + 
Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); SecretService mockSecretService = Mockito.mock(SecretService.class); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index ad91c214db28f2..96a12dc3be5a76 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -11,27 +15,21 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; -import 
com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListSecretsResolverTest { - private static final ListSecretsInput TEST_INPUT = new ListSecretsInput( - 0, 20, null - ); + private static final ListSecretsInput TEST_INPUT = new ListSecretsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { @@ -40,40 +38,42 @@ public void testGetSuccess() throws Exception { DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.search( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - 
Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(List.class), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN))))); + + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver @@ -99,36 +99,31 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - 
Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), + Mockito.any(), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(List.class), + Mockito.anyInt(), + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java new file mode 100644 index 00000000000000..2e253ebc8161b1 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java @@ -0,0 +1,96 @@ +package com.linkedin.datahub.graphql.resolvers.ingest.secret; + +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.getMockDenyContext; +import static com.linkedin.metadata.Constants.SECRET_VALUE_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static 
org.mockito.Mockito.when; + +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import com.linkedin.datahub.graphql.generated.UpdateSecretInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.secret.DataHubSecretValue; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpdateSecretResolverTest { + + private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:secret:secret-id"); + + private static final UpdateSecretInput TEST_INPUT = + new UpdateSecretInput(TEST_URN.toString(), "MY_SECRET", "mysecretvalue", "dummy"); + + private DataFetchingEnvironment mockEnv; + private EntityClient mockClient; + private SecretService mockSecretService; + private UpdateSecretResolver resolver; + + @BeforeMethod + public void before() { + mockClient = Mockito.mock(EntityClient.class); + mockSecretService = Mockito.mock(SecretService.class); + + resolver = new UpdateSecretResolver(mockClient, mockSecretService); + } + + private DataHubSecretValue createSecretAspect() { + DataHubSecretValue secretAspect = new DataHubSecretValue(); + secretAspect.setValue("encryptedvalue.updated"); + secretAspect.setName(TEST_INPUT.getName() + ".updated"); + secretAspect.setDescription(TEST_INPUT.getDescription() + ".updated"); + secretAspect.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + return secretAspect; + } + + @Test + public void 
testGetSuccess() throws Exception { + // with valid context + QueryContext mockContext = getMockAllowContext(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + Mockito.when(mockClient.exists(any(), any())).thenReturn(true); + Mockito.when(mockSecretService.encrypt(any())).thenReturn("encrypted_value"); + final EntityResponse entityResponse = new EntityResponse(); + final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + aspectMap.put( + SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(createSecretAspect().data()))); + entityResponse.setAspects(aspectMap); + + when(mockClient.getV2(any(), any(), any(), any())).thenReturn(entityResponse); + + // Invoke the resolver + resolver.get(mockEnv).join(); + Mockito.verify(mockClient, Mockito.times(1)).ingestProposal(any(), any(), anyBoolean()); + } + + @Test( + description = "validate if nothing provided throws Exception", + expectedExceptions = {AuthorizationException.class, CompletionException.class}) + public void testGetUnauthorized() throws Exception { + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).join(); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java index c898ae72807101..2855314506e2cb 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; import com.linkedin.r2.RemoteInvocationException; @@ -8,9 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteIngestionSourceResolverTest { @Test @@ -22,11 +23,13 @@ public void testGetSuccess() throws Exception { // execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_INGESTION_SOURCE_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(mockContext.getOperationContext(), TEST_INGESTION_SOURCE_URN); } @Test @@ -38,24 +41,28 @@ public void testGetUnauthorized() throws Exception { // Execute resolver QueryContext mockContext = getMockDenyContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), eq(TEST_INGESTION_SOURCE_URN)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(any(), Mockito.eq(TEST_INGESTION_SOURCE_URN)); // Execute Resolver QueryContext mockContext = getMockAllowContext(); DeleteIngestionSourceResolver resolver = new DeleteIngestionSourceResolver(mockClient); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java index ebafd1782e000d..22db930921374b 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertThrows; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -14,13 +17,9 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; import java.util.HashSet; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.assertThrows; - public class GetIngestionSourceResolverTest { @Test @@ -30,29 +29,31 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo returnedInfo = getTestIngestionSourceInfo(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new 
HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInfo.data()))))))); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -72,28 +73,24 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); GetIngestionSourceResolver resolver = new 
GetIngestionSourceResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java index 8e2453ce06a398..cd3b5c9dce47e8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -13,23 +17,20 @@ import com.linkedin.ingestion.DataHubIngestionSourceInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubIngestionSourceKey; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import 
com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListIngestionSourceResolverTest { - private static final ListIngestionSourcesInput TEST_INPUT = new ListIngestionSourcesInput(0, 20, null, null); + private static final ListIngestionSourcesInput TEST_INPUT = + new ListIngestionSourcesInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { @@ -40,45 +41,51 @@ public void testGetSuccess() throws Exception { final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(""), - Mockito.any(), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), - 
Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(key.data())) - ))) - ) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN))))); + + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(key.data()))))))); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockContext.getOperationContext()).thenReturn(mock(OperationContext.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -88,7 +95,8 @@ public void testGetSuccess() throws Exception { assertEquals(resolver.get(mockEnv).get().getCount(), 1); assertEquals(resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getIngestionSources().size(), 1); - 
verifyTestIngestionSourceGraphQL(resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); + verifyTestIngestionSourceGraphQL( + resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); } @Test @@ -100,35 +108,29 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java index 16d8da9169a8fc..955188a4e4fed1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java @@ -1,7 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.verifyIngestProposal; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceConfigInput; import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceInput; import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceScheduleInput; @@ -15,19 +22,19 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpsertIngestionSourceResolverTest { - private static final UpdateIngestionSourceInput TEST_INPUT = new UpdateIngestionSourceInput( - "Test source", - "mysql", "Test source description", - new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), - new UpdateIngestionSourceConfigInput("my test recipe", "0.8.18", 
"executor id", false, null) - ); + private static final UpdateIngestionSourceInput TEST_INPUT = makeInput(); + + private static UpdateIngestionSourceInput makeInput() { + return new UpdateIngestionSourceInput( + "Test source", + "mysql", + "Test source description", + new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), + new UpdateIngestionSourceConfigInput( + "my test recipe", "0.8.18", "executor id", false, null)); + } @Test public void testGetSuccess() throws Exception { @@ -38,7 +45,8 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -48,24 +56,22 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setType(TEST_INPUT.getType()); info.setName(TEST_INPUT.getName()); - info.setSchedule(new DataHubIngestionSourceSchedule() - .setInterval(TEST_INPUT.getSchedule().getInterval()) - .setTimezone(TEST_INPUT.getSchedule().getTimezone()) - ); - info.setConfig(new DataHubIngestionSourceConfig() - .setRecipe(TEST_INPUT.getConfig().getRecipe()) - .setVersion(TEST_INPUT.getConfig().getVersion()) - .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) - .setDebugMode(TEST_INPUT.getConfig().getDebugMode()) - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(MutationUtils.buildMetadataChangeProposalWithUrn(TEST_INGESTION_SOURCE_URN, - INGESTION_INFO_ASPECT_NAME, info) - ), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + info.setSchedule( + new 
DataHubIngestionSourceSchedule() + .setInterval(TEST_INPUT.getSchedule().getInterval()) + .setTimezone(TEST_INPUT.getSchedule().getTimezone())); + info.setConfig( + new DataHubIngestionSourceConfig() + .setRecipe(TEST_INPUT.getConfig().getRecipe()) + .setVersion(TEST_INPUT.getConfig().getVersion()) + .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) + .setDebugMode(TEST_INPUT.getConfig().getDebugMode())); + + verifyIngestProposal( + mockClient, + 1, + MutationUtils.buildMetadataChangeProposalWithUrn( + TEST_INGESTION_SOURCE_URN, INGESTION_INFO_ASPECT_NAME, info)); } @Test @@ -77,24 +83,20 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), any(), Mockito.eq(false)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver @@ -105,4 +107,54 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> 
resolver.get(mockEnv).join()); } + + @Test + public void testUpsertWithInvalidCron() throws Exception { + final UpdateIngestionSourceInput input = makeInput(); + input.setSchedule(new UpdateIngestionSourceScheduleInput("* * * * 123", "UTC")); + + // Create resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(DataHubGraphQLException.class, () -> resolver.get(mockEnv).join()); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); + + input.setSchedule(new UpdateIngestionSourceScheduleInput("null", "UTC")); + assertThrows(DataHubGraphQLException.class, () -> resolver.get(mockEnv).join()); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); + } + + @Test + public void testUpsertWithInvalidTimezone() throws Exception { + final UpdateIngestionSourceInput input = makeInput(); + input.setSchedule(new UpdateIngestionSourceScheduleInput("* * * * *", "Invalid")); + + // Create resolver + EntityClient mockClient = Mockito.mock(EntityClient.class); + UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(DataHubGraphQLException.class, () -> resolver.get(mockEnv).join()); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); + + input.setSchedule(new UpdateIngestionSourceScheduleInput("* * * * *", "America/Los_Angel")); + assertThrows(DataHubGraphQLException.class, () -> resolver.get(mockEnv).join()); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java new file mode 100644 index 00000000000000..aa9b87922e6cb0 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolverTest.java @@ -0,0 +1,120 @@ +package com.linkedin.datahub.graphql.resolvers.load; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.graphql.generated.Dashboard; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.types.dataset.DatasetType; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.entity.EntityService; +import graphql.schema.DataFetchingEnvironment; +import java.util.List; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class BatchGetEntitiesResolverTest { + private EntityClient _entityClient; + private 
EntityService _entityService; + private DataFetchingEnvironment _dataFetchingEnvironment; + + @BeforeMethod + public void setupTest() { + _entityService = mock(EntityService.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _entityClient = mock(EntityClient.class); + } + + List getRequestEntities(List urnList) { + + return urnList.stream() + .map( + urn -> { + if (urn.startsWith("urn:li:dataset")) { + Dataset entity = new Dataset(); + entity.setUrn(urn); + return entity; + } else if (urn.startsWith("urn:li:dashboard")) { + Dashboard entity = new Dashboard(); + entity.setUrn(urn); + return entity; + } else { + throw new RuntimeException("Can't handle urn " + urn); + } + }) + .collect(Collectors.toList()); + } + + @Test + /** Tests that if responses come back out of order, we stitch them back correctly */ + public void testReordering() throws Exception { + Function entityProvider = mock(Function.class); + List inputEntities = + getRequestEntities(ImmutableList.of("urn:li:dataset:1", "urn:li:dataset:2")); + when(entityProvider.apply(any())).thenReturn(inputEntities); + BatchGetEntitiesResolver resolver = + new BatchGetEntitiesResolver( + ImmutableList.of(new DatasetType(_entityClient)), entityProvider); + + DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class); + when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry); + DataLoader mockDataLoader = mock(DataLoader.class); + when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader); + + Dataset mockResponseEntity1 = new Dataset(); + mockResponseEntity1.setUrn("urn:li:dataset:1"); + + Dataset mockResponseEntity2 = new Dataset(); + mockResponseEntity2.setUrn("urn:li:dataset:2"); + + CompletableFuture mockFuture = + CompletableFuture.completedFuture( + ImmutableList.of(mockResponseEntity2, mockResponseEntity1)); + when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); + when(_entityService.exists(any(), 
any(List.class), eq(true))) + .thenAnswer(args -> Set.of(args.getArgument(0))); + List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); + assertEquals(batchGetResponse.size(), 2); + assertEquals(batchGetResponse.get(0), mockResponseEntity1); + assertEquals(batchGetResponse.get(1), mockResponseEntity2); + } + + @Test + /** Tests that if input list contains duplicates, we stitch them back correctly */ + public void testDuplicateUrns() throws Exception { + Function entityProvider = mock(Function.class); + List inputEntities = + getRequestEntities(ImmutableList.of("urn:li:dataset:foo", "urn:li:dataset:foo")); + when(entityProvider.apply(any())).thenReturn(inputEntities); + BatchGetEntitiesResolver resolver = + new BatchGetEntitiesResolver( + ImmutableList.of(new DatasetType(_entityClient)), entityProvider); + + DataLoaderRegistry mockDataLoaderRegistry = mock(DataLoaderRegistry.class); + when(_dataFetchingEnvironment.getDataLoaderRegistry()).thenReturn(mockDataLoaderRegistry); + DataLoader mockDataLoader = mock(DataLoader.class); + when(mockDataLoaderRegistry.getDataLoader(any())).thenReturn(mockDataLoader); + + Dataset mockResponseEntity = new Dataset(); + mockResponseEntity.setUrn("urn:li:dataset:foo"); + + CompletableFuture mockFuture = + CompletableFuture.completedFuture(ImmutableList.of(mockResponseEntity)); + when(mockDataLoader.loadMany(any())).thenReturn(mockFuture); + when(_entityService.exists(any(), any(List.class), eq(true))) + .thenAnswer(args -> Set.of(args.getArgument(0))); + List batchGetResponse = resolver.get(_dataFetchingEnvironment).join(); + assertEquals(batchGetResponse.size(), 2); + assertEquals(batchGetResponse.get(0), mockResponseEntity); + assertEquals(batchGetResponse.get(1), mockResponseEntity); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolverTest.java new file mode 100644 index 00000000000000..d2799278c1238d --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolverTest.java @@ -0,0 +1,124 @@ +package com.linkedin.datahub.graphql.resolvers.load; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; + +import com.linkedin.common.EntityRelationship; +import com.linkedin.common.EntityRelationshipArray; +import com.linkedin.common.EntityRelationships; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.*; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.graph.GraphClient; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class EntityRelationshipsResultResolverTest { + private final Urn existentUser = Urn.createFromString("urn:li:corpuser:johndoe"); + private final Urn softDeletedUser = Urn.createFromString("urn:li:corpuser:deletedUser"); + + private CorpUser existentEntity; + private CorpUser softDeletedEntity; + + private EntityService _entityService; + private GraphClient _graphClient; + + private EntityRelationshipsResultResolver resolver; + private RelationshipsInput input; + private DataFetchingEnvironment mockEnv; + + public EntityRelationshipsResultResolverTest() throws URISyntaxException {} + + @BeforeMethod + public void setupTest() { + _entityService = mock(EntityService.class); + 
_graphClient = mock(GraphClient.class); + resolver = new EntityRelationshipsResultResolver(_graphClient, _entityService); + + mockEnv = mock(DataFetchingEnvironment.class); + QueryContext context = getMockAllowContext(); + when(mockEnv.getContext()).thenReturn(context); + + CorpGroup source = new CorpGroup(); + source.setUrn("urn:li:corpGroup:group1"); + when(mockEnv.getSource()).thenReturn(source); + + when(_entityService.exists(any(), eq(Set.of(existentUser, softDeletedUser)), eq(true))) + .thenReturn(Set.of(existentUser, softDeletedUser)); + when(_entityService.exists(any(), eq(Set.of(existentUser, softDeletedUser)), eq(false))) + .thenReturn(Set.of(existentUser)); + + input = new RelationshipsInput(); + input.setStart(0); + input.setCount(10); + input.setDirection(RelationshipDirection.INCOMING); + input.setTypes(List.of("SomeType")); + + EntityRelationships entityRelationships = + new EntityRelationships() + .setStart(0) + .setCount(2) + .setTotal(2) + .setRelationships( + new EntityRelationshipArray( + new EntityRelationship().setEntity(existentUser).setType("SomeType"), + new EntityRelationship().setEntity(softDeletedUser).setType("SomeType"))); + + // always expected INCOMING, and "SomeType" in all tests + when(_graphClient.getRelatedEntities( + eq(source.getUrn()), + eq(input.getTypes()), + same(com.linkedin.metadata.query.filter.RelationshipDirection.INCOMING), + eq(input.getStart()), + eq(input.getCount()), + any())) + .thenReturn(entityRelationships); + + when(mockEnv.getArgument(eq("input"))).thenReturn(input); + + existentEntity = new CorpUser(); + existentEntity.setUrn(existentUser.toString()); + existentEntity.setType(EntityType.CORP_USER); + + softDeletedEntity = new CorpUser(); + softDeletedEntity.setUrn(softDeletedUser.toString()); + softDeletedEntity.setType(EntityType.CORP_USER); + } + + @Test + public void testIncludeSoftDeleted() throws ExecutionException, InterruptedException { + EntityRelationshipsResult expected = new 
EntityRelationshipsResult(); + expected.setRelationships( + List.of(resultRelationship(existentEntity), resultRelationship(softDeletedEntity))); + expected.setStart(0); + expected.setCount(2); + expected.setTotal(2); + assertEquals(resolver.get(mockEnv).get().toString(), expected.toString()); + } + + @Test + public void testExcludeSoftDeleted() throws ExecutionException, InterruptedException { + input.setIncludeSoftDelete(false); + EntityRelationshipsResult expected = new EntityRelationshipsResult(); + expected.setRelationships(List.of(resultRelationship(existentEntity))); + expected.setStart(0); + expected.setCount(1); + expected.setTotal(1); + assertEquals(resolver.get(mockEnv).get().toString(), expected.toString()); + } + + private com.linkedin.datahub.graphql.generated.EntityRelationship resultRelationship( + Entity entity) { + return new com.linkedin.datahub.graphql.generated.EntityRelationship( + "SomeType", RelationshipDirection.INCOMING, entity, null); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 61dd6c678e6e0f..42afb04d5734e9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -14,161 +18,168 @@ import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import 
com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.dataset.DatasetType; +import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import org.testng.annotations.Test; -import com.linkedin.entity.Aspect; - import java.net.URISyntaxException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class MutableTypeBatchResolverTest { - private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1"; - private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2"; - private static final boolean TEST_DATASET_1_IS_DEPRECATED = true; - private static final boolean TEST_DATASET_2_IS_DEPRECATED = false; - private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note"; - private static final String TEST_DATASET_2_DEPRECATION_NOTE = ""; - private static final Deprecation TEST_DATASET_1_DEPRECATION; - - static { - try { - TEST_DATASET_1_DEPRECATION = new Deprecation() - .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) - .setNote(TEST_DATASET_1_DEPRECATION_NOTE) - .setActor(Urn.createFromString("urn:li:corpuser:datahub")); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1"; + private static final String 
TEST_DATASET_2_URN = "urn:li:dataset:id-2"; + private static final boolean TEST_DATASET_1_IS_DEPRECATED = true; + private static final boolean TEST_DATASET_2_IS_DEPRECATED = false; + private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note"; + private static final String TEST_DATASET_2_DEPRECATION_NOTE = ""; + private static final Deprecation TEST_DATASET_1_DEPRECATION; + + static { + try { + TEST_DATASET_1_DEPRECATION = + new Deprecation() + .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) + .setNote(TEST_DATASET_1_DEPRECATION_NOTE) + .setActor(Urn.createFromString("urn:li:corpuser:datahub")); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } - - private static final Deprecation TEST_DATASET_2_DEPRECATION; - - static { - try { - TEST_DATASET_2_DEPRECATION = new Deprecation() - .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) - .setNote(TEST_DATASET_2_DEPRECATION_NOTE) - .setActor(Urn.createFromString("urn:li:corpuser:datahub")); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + } + + private static final Deprecation TEST_DATASET_2_DEPRECATION; + + static { + try { + TEST_DATASET_2_DEPRECATION = + new Deprecation() + .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) + .setNote(TEST_DATASET_2_DEPRECATION_NOTE) + .setActor(Urn.createFromString("urn:li:corpuser:datahub")); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } + } - @Test - public void testGetSuccess() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType batchMutableType = new DatasetType(mockClient); + @Test + public void testGetSuccess() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); - MutableTypeBatchResolver resolver = new MutableTypeBatchResolver<>(batchMutableType); + BatchMutableType batchMutableType = + new DatasetType(mockClient); - List mockInputs = Arrays.asList( + MutableTypeBatchResolver resolver = + new 
MutableTypeBatchResolver<>(batchMutableType); + + List mockInputs = + Arrays.asList( new BatchDatasetUpdateInput.Builder() - .setUrn(TEST_DATASET_1_URN) - .setUpdate( - new DatasetUpdateInput.Builder() - .setDeprecation( - new DatasetDeprecationUpdate.Builder() - .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) - .setNote(TEST_DATASET_1_DEPRECATION_NOTE) - .build() - ) - .build() - ) - .build(), + .setUrn(TEST_DATASET_1_URN) + .setUpdate( + new DatasetUpdateInput.Builder() + .setDeprecation( + new DatasetDeprecationUpdate.Builder() + .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) + .setNote(TEST_DATASET_1_DEPRECATION_NOTE) + .build()) + .build()) + .build(), new BatchDatasetUpdateInput.Builder() - .setUrn(TEST_DATASET_2_URN) - .setUpdate( - new DatasetUpdateInput.Builder() - .setDeprecation( - new DatasetDeprecationUpdate.Builder() - .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) - .setNote(TEST_DATASET_2_DEPRECATION_NOTE) - .build() - ) - .build() - ) - .build() - ); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Authentication mockAuth = Mockito.mock(Authentication.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth); - Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - - Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN); - Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN); - - Mockito.when(mockClient.batchGetV2(Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(datasetUrn1, datasetUrn2))), - Mockito.any(), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - datasetUrn1, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - 
Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data())) - ))), - datasetUrn2, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn2) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data())) - ))) - )); - - List result = resolver.get(mockEnv).join(); - - ArgumentCaptor> changeProposalCaptor = ArgumentCaptor.forClass((Class) Collection.class); - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( + .setUrn(TEST_DATASET_2_URN) + .setUpdate( + new DatasetUpdateInput.Builder() + .setDeprecation( + new DatasetDeprecationUpdate.Builder() + .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) + .setNote(TEST_DATASET_2_DEPRECATION_NOTE) + .build()) + .build()) + .build()); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Authentication mockAuth = Mockito.mock(Authentication.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth); + Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); + + Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN); + Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN); + + Mockito.when( + mockClient.batchGetV2( + any(), Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), - // Dataset aspects to fetch are private, but aren't important for this test - Mockito.any(), - Mockito.any(Authentication.class) - ); - Collection changeProposals = changeProposalCaptor.getValue(); - 
- assertEquals(changeProposals.size(), 2); - assertEquals(result.size(), 2); - } - - @Test - public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType batchMutableType = new DatasetType(mockClient); - - MutableTypeBatchResolver resolver = new MutableTypeBatchResolver<>(batchMutableType); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - } + Mockito.eq(new HashSet<>(ImmutableSet.of(datasetUrn1, datasetUrn2))), + Mockito.any())) + .thenReturn( + ImmutableMap.of( + datasetUrn1, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data()))))), + datasetUrn2, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn2) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data()))))))); + + List result = resolver.get(mockEnv).join(); + + ArgumentCaptor> changeProposalCaptor = + ArgumentCaptor.forClass((Class) Collection.class); + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals(any(), changeProposalCaptor.capture(), Mockito.eq(false)); + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), + // Dataset aspects to fetch are private, but aren't important for this test + Mockito.any()); + Collection changeProposals = changeProposalCaptor.getValue(); + + 
assertEquals(changeProposals.size(), 2); + assertEquals(result.size(), 2); + } + + @Test + public void testGetFailureUnauthorized() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + + BatchMutableType batchMutableType = + new DatasetType(mockClient); + + MutableTypeBatchResolver resolver = + new MutableTypeBatchResolver<>(batchMutableType); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java index 1adf7b1200574e..e76317391ac34b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java @@ -1,60 +1,80 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.Siblings; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.SiblingsUtils; import com.linkedin.metadata.entity.EntityService; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import io.datahubproject.metadata.context.OperationContext; import java.util.HashSet; import java.util.Optional; - -import static 
com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; -import static org.testng.AssertJUnit.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class SiblingsUtilsTest { - private static final String TEST_DATASET_URN1 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; - private static final String TEST_DATASET_URN2 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; - private static final String TEST_DATASET_URN3 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; + private static final String TEST_DATASET_URN1 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; + private static final String TEST_DATASET_URN2 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; + private static final String TEST_DATASET_URN3 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; @Test public void testGetSiblingUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); - EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings().setSiblings(siblingUrns) - ); - - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + EntityService mockService = mock(EntityService.class); + Mockito.when( + mockService.getLatestAspect( + any(), eq(UrnUtils.getUrn(TEST_DATASET_URN1)), eq(SIBLINGS_ASPECT_NAME))) + .thenReturn(new Siblings().setSiblings(siblingUrns)); + + assertEquals( + SiblingsUtils.getSiblingUrns( + mock(OperationContext.class), UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + siblingUrns); } @Test public void 
testGetSiblingUrnsWithoutSiblings() { - EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings() - ); - - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + EntityService mockService = mock(EntityService.class); + Mockito.when( + mockService.getLatestAspect( + any(), eq(UrnUtils.getUrn(TEST_DATASET_URN1)), eq(SIBLINGS_ASPECT_NAME))) + .thenReturn(new Siblings()); + + assertEquals( + SiblingsUtils.getSiblingUrns( + mock(OperationContext.class), UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetSiblingUrnsWithSiblingsAspect() { - EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - null - ); - - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + EntityService mockService = mock(EntityService.class); + Mockito.when( + mockService.getLatestAspect( + any(), eq(UrnUtils.getUrn(TEST_DATASET_URN1)), eq(SIBLINGS_ASPECT_NAME))) + .thenReturn(null); + + assertEquals( + SiblingsUtils.getSiblingUrns( + mock(OperationContext.class), UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetNextSiblingUrn() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); Optional nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, new HashSet<>()); assertEquals(nextUrn, Optional.of(UrnUtils.getUrn(TEST_DATASET_URN2))); @@ -62,7 +82,8 @@ public void testGetNextSiblingUrn() { @Test public void testGetNextSiblingUrnWithUsedUrns() { - UrnArray siblingUrns = new 
UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); HashSet usedUrns = new HashSet<>(); usedUrns.add(UrnUtils.getUrn(TEST_DATASET_URN2)); Optional nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, usedUrns); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index 9bd44e9ab09065..313c15c95c952f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput; @@ -12,17 +17,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class UpdateUserSettingResolverTest { private static final String TEST_USER_URN = "urn:li:corpuser:test"; + @Test public void testWriteCorpUserSettings() throws Exception { - EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_USER_URN))).thenReturn(true); + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_USER_URN)), eq(true))) + 
.thenReturn(true); UpdateUserSettingResolver resolver = new UpdateUserSettingResolver(mockService); @@ -36,9 +39,12 @@ public void testWriteCorpUserSettings() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - CorpUserSettings newSettings = new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_USER_URN), - CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + CorpUserSettings newSettings = + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_USER_URN), CORP_USER_SETTINGS_ASPECT_NAME, newSettings); verifySingleIngestProposal(mockService, 1, proposal); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java index e2661841fe8f70..c4778cbbd40535 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.operation; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; import com.linkedin.common.OperationType; @@ -16,37 +20,33 @@ import org.mockito.Mockito; import 
org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class ReportOperationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Operation expectedOperation = new Operation() - .setTimestampMillis(0L) - .setLastUpdatedTimestamp(0L) - .setOperationType(OperationType.INSERT) - .setSourceType(OperationSourceType.DATA_PLATFORM) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) - .setCustomOperationType(null, SetMode.IGNORE_NULL) - .setNumAffectedRows(1L); + Operation expectedOperation = + new Operation() + .setTimestampMillis(0L) + .setLastUpdatedTimestamp(0L) + .setOperationType(OperationType.INSERT) + .setSourceType(OperationSourceType.DATA_PLATFORM) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setCustomOperationType(null, SetMode.IGNORE_NULL) + .setNumAffectedRows(1L); - MetadataChangeProposal expectedProposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - OPERATION_ASPECT_NAME, expectedOperation); + MetadataChangeProposal expectedProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), OPERATION_ASPECT_NAME, expectedOperation); // Test setting the domain - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class))) - .thenReturn(TEST_ENTITY_URN); + Mockito.when(mockClient.ingestProposal(any(), Mockito.eq(expectedProposal))) + .thenReturn(TEST_ENTITY_URN); ReportOperationResolver resolver = new ReportOperationResolver(mockClient); @@ -57,11 +57,7 @@ public void testGetSuccess() throws 
Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, expectedProposal); } @Test @@ -77,9 +73,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any()); } private ReportOperationInput getTestInput() { @@ -91,4 +85,4 @@ private ReportOperationInput getTestInput() { input.setSourceType(com.linkedin.datahub.graphql.generated.OperationSourceType.DATA_PLATFORM); return input; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 329d71ec125db0..b239e0300ffcc5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -18,39 +22,48 @@ import 
com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddOwnersResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_OWNER_1_URN = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_2_URN = "urn:li:corpuser:test-id-2"; private static final String TEST_OWNER_3_URN = "urn:li:corpGroup:test-id-3"; @Test public void testGetSuccessNoExistingOwners() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), 
eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -58,12 +71,20 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), - new OwnerInput(TEST_OWNER_2_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) - ), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), + new OwnerInput( + TEST_OWNER_2_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -71,38 +92,51 @@ public void 
testGetSuccessNoExistingOwners() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)); } @Test public void testGetSuccessExistingOwnerNewType() throws Exception { - EntityService mockService = getMockEntityService(); - - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + EntityService mockService = getMockEntityService(); + + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + 
.thenReturn(oldOwnership); + + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -110,13 +144,16 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -124,34 +161,45 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + 
Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); } @Test public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { - EntityService mockService = getMockEntityService(); - - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + EntityService mockService = getMockEntityService(); + + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); + + Mockito.when(mockService.exists(any(), any(Urn.class), eq(true))).thenReturn(true); + + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + 
.name()))), + eq(true))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -159,12 +207,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -172,39 +224,62 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); } @Test public void testGetSuccessMultipleOwnerTypes() throws Exception { - EntityService mockService = getMockEntityService(); - - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true); - - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) - .thenReturn(true); + EntityService mockService = getMockEntityService(); + + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); + + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true))) + .thenReturn(true); + + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + 
OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -212,22 +287,28 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_2_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_3_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_GROUP) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertTrue(resolver.get(mockEnv).get()); @@ -235,40 +316,47 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)), eq(true)); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(false); AddOwnersResolver resolver = new AddOwnersResolver(mockService); // Execute resolver 
QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -278,25 +366,35 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_1_URN)), eq(true))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -306,15 +404,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); AddOwnersResolver resolver = new AddOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -325,23 +429,29 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = 
getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), any(AspectsBatchImpl.class), Mockito.anyBoolean()); AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 79fc62742f4442..8275f9f83ef83f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -17,47 +21,60 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @Test public void testGetSuccessNoExistingOwners() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -65,66 +82,98 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)); - Mockito.verify(mockService, 
Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)); } @Test public void testGetSuccessExistingOwners() throws Exception { - final Ownership originalOwnership = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership originalOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name()))), + eq(true))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + any(), + eq( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + eq(true))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -132,68 +181,83 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of( - new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - 
com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true)); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(false); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + 
.name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -203,42 +267,56 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + 
.thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -248,26 +326,33 @@ public void 
testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -278,34 +363,41 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + 
com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 9dc2ec81278069..9ea9ac693b98ed 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -14,56 +18,63 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveOwnersResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; 
import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @Test public void testGetSuccessNoExistingOwners() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), 
eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of( - TEST_OWNER_URN_1, - TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -72,44 +83,65 @@ public void testGetSuccessNoExistingOwners() throws Exception { @Test public void testGetSuccessExistingOwners() throws Exception { - EntityService mockService = getMockEntityService(); - - final Ownership oldOwners1 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + final Ownership oldOwners1 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(oldOwners1); - final Ownership oldOwners2 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_2)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners2 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_2)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(oldOwners2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); + 
Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_2)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -118,33 +150,43 @@ public void testGetSuccessExistingOwners() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.OWNERSHIP_ASPECT_NAME), + eq(0L))) 
.thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_OWNER_URN_1)), eq(true))) + .thenReturn(true); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -153,17 +195,20 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchRemoveOwnersResolver resolver = new 
BatchRemoveOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -173,24 +218,27 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java index 0643ead444c948..6b2deeead2ee7b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java @@ -1,12 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.ownership; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.service.OwnershipTypeService; @@ -15,15 +19,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateOwnershipTypeResolverTest { - private static final CreateOwnershipTypeInput TEST_INPUT = new CreateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final CreateOwnershipTypeInput TEST_INPUT = + new CreateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); @@ -45,10 +45,12 @@ public void testCreateSuccess() throws Exception { assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); assertEquals(ownershipType.getType(), EntityType.CUSTOM_OWNERSHIP_TYPE); - Mockito.verify(mockService, Mockito.times(1)).createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createOwnershipType( + any(), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.anyLong()); } @Test @@ -65,20 +67,16 @@ public void testCreateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testCreateOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - 
Mockito.doThrow(RuntimeException.class).when(mockService).createOwnershipType( - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createOwnershipType(any(), Mockito.any(), Mockito.any(), Mockito.anyLong()); CreateOwnershipTypeResolver resolver = new CreateOwnershipTypeResolver(mockService); @@ -93,12 +91,13 @@ public void testCreateOwnershipTypeServiceException() throws Exception { private OwnershipTypeService initMockService() { OwnershipTypeService service = Mockito.mock(OwnershipTypeService.class); - Mockito.when(service.createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_OWNERSHIP_TYPE_URN); + Mockito.when( + service.createOwnershipType( + any(), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.anyLong())) + .thenReturn(TEST_OWNERSHIP_TYPE_URN); return service; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java index 9f526e40082366..be3c4e1ed6360f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ownership; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ 
-14,11 +17,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.Assert.*; - - public class DeleteOwnershipTypeResolverTest { private static final Urn TEST_URN = @@ -41,11 +39,8 @@ public void testGetSuccessOwnershipTypeCanManage() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteOwnershipType(any(), Mockito.eq(TEST_URN), anyBoolean()); } @Test @@ -62,21 +57,17 @@ public void testGetFailureOwnershipTypeCanNotManager() throws Exception { assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteOwnershipType(any(), Mockito.eq(TEST_URN), anyBoolean()); } @Test public void testGetOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteOwnershipType( - Mockito.any(), - anyBoolean(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteOwnershipType(any(), Mockito.any(), anyBoolean()); DeleteOwnershipTypeResolver resolver = new DeleteOwnershipTypeResolver(mockService); @@ -93,17 +84,16 @@ public void testGetOwnershipTypeServiceException() throws Exception { private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName("test-name") - 
.setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when(mockService.getOwnershipTypeInfo(any(), Mockito.eq(TEST_URN))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java index ceab13167246c8..e72eaa29fdc989 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ownership; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -8,7 +11,6 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.OwnershipTypeKey; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; 
import com.linkedin.metadata.search.SearchResult; @@ -18,16 +20,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListOwnershipTypesResolverTest { private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); - private static final ListOwnershipTypesInput TEST_INPUT = new ListOwnershipTypesInput(0, 20, "", null); + private static final ListOwnershipTypesInput TEST_INPUT = + new ListOwnershipTypesInput(0, 20, "", null); @Test public void testGetSuccess() throws Exception { @@ -38,21 +37,23 @@ public void testGetSuccess() throws Exception { final OwnershipTypeKey key = new OwnershipTypeKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN)))) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN))))); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); @@ -78,35 +79,29 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.any(), Mockito.anySet(), Mockito.anySet()); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); // Execute resolver @@ -124,4 +119,4 @@ public static OwnershipTypeInfo getOwnershipTypeInfo() { info.setDescription("some description"); return info; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java index 
f35b8f98cc1acf..07e4d95e14d97a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java @@ -1,14 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.ownership; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.UpdateOwnershipTypeInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; @@ -24,19 +28,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateOwnershipTypeResolverTest { private static final Urn TEST_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateOwnershipTypeInput TEST_INPUT = new UpdateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final UpdateOwnershipTypeInput TEST_INPUT = + new 
UpdateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); @Test public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { @@ -55,23 +55,23 @@ public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { assertEquals(ownershipType.getInfo().getName(), TEST_INPUT.getName()); assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); - Mockito.verify(mockService, Mockito.times(1)).updateOwnershipType( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateOwnershipType( + any(), + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.anyLong()); } @Test public void testUpdateOwnershipTypeServiceException() throws Exception { // Update resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateOwnershipType( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateOwnershipType( + any(), Mockito.any(Urn.class), Mockito.any(), Mockito.any(), Mockito.anyLong()); UpdateOwnershipTypeResolver resolver = new UpdateOwnershipTypeResolver(mockService); @@ -100,39 +100,36 @@ public void testUpdateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } private static OwnershipTypeService initOwnershipTypeService() 
{ OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when(mockService.getOwnershipTypeInfo(any(), Mockito.eq(TEST_URN))) .thenReturn(testInfo); - Mockito.when(mockService.getOwnershipTypeEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when(mockService.getOwnershipTypeEntityResponse(any(), Mockito.eq(TEST_URN))) .thenReturn(testEntityResponse); return mockService; diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java index b56d897a468ba8..7f14193737e00e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreatePostResolverTest { private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; private static final String POST_MEDIA_LOCATION = @@ -59,9 +58,12 @@ public void testCreatePost() throws Exception { UpdateMediaInput media = new UpdateMediaInput(); media.setType(POST_MEDIA_TYPE); media.setLocation(POST_MEDIA_LOCATION); - Media mediaObj = new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new Url(POST_MEDIA_LOCATION)); - when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))).thenReturn(mediaObj); + Media mediaObj = + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))) + .thenReturn(mediaObj); UpdatePostContentInput content = new UpdatePostContentInput(); 
content.setTitle(POST_TITLE); @@ -69,22 +71,31 @@ public void testCreatePost() throws Exception { content.setLink(POST_LINK); content.setContentType(POST_CONTENT_TYPE); content.setMedia(media); - com.linkedin.post.PostContent postContentObj = new com.linkedin.post.PostContent().setType( - com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new Url(POST_MEDIA_LOCATION))); - when(_postService.mapPostContent(eq(POST_CONTENT_TYPE.toString()), eq(POST_TITLE), eq(POST_DESCRIPTION), - eq(POST_LINK), any(Media.class))).thenReturn(postContentObj); + com.linkedin.post.PostContent postContentObj = + new com.linkedin.post.PostContent() + .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia( + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(_postService.mapPostContent( + eq(POST_CONTENT_TYPE.toString()), + eq(POST_TITLE), + eq(POST_DESCRIPTION), + eq(POST_LINK), + any(Media.class))) + .thenReturn(postContentObj); CreatePostInput input = new CreatePostInput(); input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); input.setContent(content); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - when(_postService.createPost(eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj), - eq(_authentication))).thenReturn(true); + when(_postService.createPost( + any(), eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj))) + .thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java index b8a7488a824fd4..64eea926c1fff4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.urn.Urn; @@ -9,11 +13,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class DeletePostResolverTest { private static final String POST_URN_STRING = "urn:li:post:123"; private PostService _postService; @@ -46,7 +45,7 @@ public void testDeletePost() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_dataFetchingEnvironment.getArgument(eq("urn"))).thenReturn(POST_URN_STRING); - when(_postService.deletePost(eq(postUrn), eq(_authentication))).thenReturn(true); + when(_postService.deletePost(any(), eq(postUrn))).thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java index c22d6bf39640d4..165dcc7cda5b89 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -15,7 +20,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -28,17 +32,9 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Map; - -import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListPostsResolverTest { private static Map _entityResponseMap; private static final String POST_URN_STRING = "urn:li:post:examplePost"; @@ -49,12 +45,15 @@ public class ListPostsResolverTest { private static final String POST_TITLE = "title"; private static final String POST_DESCRIPTION = "description"; private static final String POST_LINK = "https://datahubproject.io"; - private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); - private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) - 
.setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(MEDIA); + private static final Media MEDIA = + new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = + new PostContent() + .setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; private EntityClient _entityClient; @@ -72,8 +71,11 @@ private Map getMockPostsEntityResponse() throws URISyntaxEx DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(postUrn.toString()); dataHubRoleInfo.setName(postUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return ImmutableMap.of(postUrn, entityResponse); } @@ -106,13 +108,20 @@ public void testListPosts() throws Exception { ListPostsInput input = new ListPostsInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(1); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(1); roleSearchResult.setEntities( - new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); - - when(_entityClient.search(eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt(), - eq(_authentication), Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - 
when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); + + when(_entityClient.search( + any(), eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt())) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(any(), eq(POST_ENTITY_NAME), any(), any())) + .thenReturn(_entityResponseMap); ListPostsResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolverTest.java new file mode 100644 index 00000000000000..a2b22c075d0dae --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/UpdatePostResolverTest.java @@ -0,0 +1,118 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.Mockito.any; +import static org.mockito.Mockito.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.post.PostService; +import com.linkedin.common.Media; +import com.linkedin.common.url.Url; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.MediaType; +import com.linkedin.datahub.graphql.generated.PostContentType; +import 
com.linkedin.datahub.graphql.generated.PostType; +import com.linkedin.datahub.graphql.generated.UpdateMediaInput; +import com.linkedin.datahub.graphql.generated.UpdatePostContentInput; +import com.linkedin.datahub.graphql.generated.UpdatePostInput; +import graphql.schema.DataFetchingEnvironment; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class UpdatePostResolverTest { + + private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:post:post-id"); + private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; + private static final String POST_MEDIA_LOCATION = + "https://datahubproject.io/img/datahub-logo-color-light-horizontal.svg"; + private static final PostContentType POST_CONTENT_TYPE = PostContentType.LINK; + private static final String POST_TITLE = "title"; + private static final String POST_DESCRIPTION = "description"; + private static final String POST_LINK = "https://datahubproject.io"; + private PostService postService; + private UpdatePostResolver resolver; + private DataFetchingEnvironment dataFetchingEnvironment; + private Authentication authentication; + + @BeforeMethod + public void setupTest() throws Exception { + postService = mock(PostService.class); + dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + authentication = mock(Authentication.class); + + resolver = new UpdatePostResolver(postService); + } + + @Test + public void testNotAuthorizedFails() { + QueryContext mockContext = getMockDenyContext(); + when(dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + assertThrows(() -> resolver.get(dataFetchingEnvironment).join()); + } + + @Test + public void testUpdatePost() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(dataFetchingEnvironment.getContext()).thenReturn(mockContext); + when(mockContext.getAuthentication()).thenReturn(authentication); + + UpdateMediaInput media = new UpdateMediaInput(); + media.setType(POST_MEDIA_TYPE); + 
media.setLocation(POST_MEDIA_LOCATION); + Media mediaObj = + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(postService.mapMedia(POST_MEDIA_TYPE.toString(), POST_MEDIA_LOCATION)) + .thenReturn(mediaObj); + + UpdatePostContentInput content = new UpdatePostContentInput(); + content.setTitle(POST_TITLE); + content.setDescription(POST_DESCRIPTION); + content.setLink(POST_LINK); + content.setContentType(POST_CONTENT_TYPE); + content.setMedia(media); + com.linkedin.post.PostContent postContentObj = + new com.linkedin.post.PostContent() + .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia( + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(postService.mapPostContent( + eq(POST_CONTENT_TYPE.toString()), + eq(POST_TITLE), + eq(POST_DESCRIPTION), + eq(POST_LINK), + any(Media.class))) + .thenReturn(postContentObj); + + UpdatePostInput input = new UpdatePostInput(); + input.setUrn(TEST_URN.toString()); + input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); + input.setContent(content); + when(dataFetchingEnvironment.getArgument("input")).thenReturn(input); + when(postService.updatePost( + any(), + eq(TEST_URN), + eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), + eq(postContentObj))) + .thenReturn(true); + + assertTrue(resolver.get(dataFetchingEnvironment).join()); + verify(postService, times(1)).updatePost(any(), any(), any(), any()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java index 9c04c67dd3a3b3..034a8215c4a8ca 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -24,10 +28,10 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.service.QueryService; -import com.linkedin.entity.client.EntityClient; import com.linkedin.query.QueryProperties; import com.linkedin.query.QuerySource; import com.linkedin.query.QueryStatement; @@ -40,21 +44,19 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class CreateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final CreateQueryInput TEST_INPUT = new CreateQueryInput( - new CreateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString())) - ); + private static 
final CreateQueryInput TEST_INPUT = + new CreateQueryInput( + new CreateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString()))); @Test public void testGetSuccess() throws Exception { @@ -70,25 +72,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - 
.setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createQuery( + any(), + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.anyLong()); } @Test @@ -105,23 +117,23 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createQuery( + any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.anyLong()); CreateQueryResolver resolver = new CreateQueryResolver(mockService); @@ -136,58 +148,66 @@ public void 
testGetQueryServiceException() throws Exception { private QueryService initMockService() { QueryService service = Mockito.mock(QueryService.class); - Mockito.when(service.createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_QUERY_URN); - - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects querySubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - 
Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(querySubjects.data())) - ))) - ); + Mockito.when( + service.createQuery( + any(), + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT + .getProperties() + .getStatement() + .getLanguage() + .toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.anyLong())) + .thenReturn(TEST_QUERY_URN); + + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects querySubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + + Mockito.when(service.getQueryEntityResponse(any(), Mockito.eq(TEST_QUERY_URN))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new 
Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(querySubjects.data())))))); return service; } @@ -197,36 +217,40 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java index 78c894f27cbc3b..491f06e800d709 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -22,14 +26,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; 
-import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteQueryResolverTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @Test @@ -45,10 +46,7 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).deleteQuery(any(), Mockito.eq(TEST_QUERY_URN)); } @Test @@ -62,10 +60,7 @@ public void testGetSuccessCanEditQueries() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).deleteQuery(any(), Mockito.eq(TEST_QUERY_URN)); } @Test @@ -79,19 +74,14 @@ public void testGetFailureActorUnauthorized() { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)).deleteQuery(any(), Mockito.eq(TEST_QUERY_URN)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteQuery( - Mockito.any(), - 
Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class).when(mockService).deleteQuery(any(), Mockito.any()); DeleteQueryResolver resolver = new DeleteQueryResolver(mockService); @@ -108,14 +98,11 @@ private static QueryService initMockService() { QueryService mockService = Mockito.mock(QueryService.class); QuerySubjects existingQuerySubjects = new QuerySubjects(); - existingQuerySubjects.setSubjects(new QuerySubjectArray( - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)) - )); + existingQuerySubjects.setSubjects( + new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(mockService.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) - .thenReturn(existingQuerySubjects); + Mockito.when(mockService.getQuerySubjects(any(), Mockito.eq(TEST_QUERY_URN))) + .thenReturn(existingQuerySubjects); return mockService; } @@ -126,40 +113,47 @@ private QueryContext getMockAllowEditQueriesOnQueryContext() { private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getActorUrn()) + .thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), - DeleteQueryResolverTest.TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - 
TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec( + DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), + DeleteQueryResolverTest.TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 877a4d2b27f6ae..ee728b17e8c621 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.query; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -14,7 +17,6 @@ import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; @@ -32,28 +34,24 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static 
org.testng.Assert.*; - - public class ListQueriesResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = Urn.createFromTuple("query", "test-id"); - private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = new ListQueriesInput( - 0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString() - ); - private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = new ListQueriesInput( - 0, 30, null, QuerySource.MANUAL, null - ); - private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = new ListQueriesInput( - 0, 40, null, null, TEST_DATASET_URN.toString() - ); + private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = + new ListQueriesInput(0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString()); + private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = + new ListQueriesInput(0, 30, null, QuerySource.MANUAL, null); + private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = + new ListQueriesInput(0, 40, null, null, TEST_DATASET_URN.toString()); @DataProvider(name = "inputs") public static Object[][] inputs() { - return new Object[][] {{ TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER}}; + return new Object[][] { + {TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER} + }; } @Test(dataProvider = "inputs") @@ -61,22 +59,30 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(input.getQuery() == null ? 
ListQueriesResolver.DEFAULT_QUERY : input.getQuery()), - Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), - Mockito.eq(new SortCriterion().setField(ListQueriesResolver.CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN)))) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq( + input.getQuery() == null + ? ListQueriesResolver.DEFAULT_QUERY + : input.getQuery()), + Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), + Mockito.eq( + Collections.singletonList( + new SortCriterion() + .setField(ListQueriesResolver.CREATED_AT_FIELD) + .setOrder(SortOrder.DESCENDING))), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN))))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); @@ -90,7 +96,8 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getQueries().size(), 1); - assertEquals(resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); } @Test @@ -102,33 +109,33 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT_FULL_FILTERS); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_FULL_FILTERS); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + any(), + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt()); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver @@ -146,7 +153,8 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity if (source != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_SOURCE_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_SOURCE_FIELD, null, ImmutableList.of(source.toString()), false, @@ -154,14 +162,14 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity } if 
(entityUrn != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_ENTITIES_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_ENTITIES_FIELD, null, ImmutableList.of(entityUrn), false, FilterOperator.EQUAL)); - } criteria.setAnd(andConditions); - return ResolverUtils.buildFilter(Collections.emptyList(), ImmutableList.of(criteria)); + return ResolverUtils.buildFilter(Collections.emptyList(), ImmutableList.of(criteria), null); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java index 9b500b5fb39361..ce21ed99595660 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -13,12 +17,12 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateQueryInput; -import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; -import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.QueryLanguage; import com.linkedin.datahub.graphql.generated.QueryStatementInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryInput; +import 
com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; +import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; import com.linkedin.entity.EntityResponse; @@ -40,22 +44,21 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class UpdateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateQueryInput TEST_INPUT = new UpdateQueryInput( - new UpdateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString())) - ); + private static final UpdateQueryInput TEST_INPUT = + new UpdateQueryInput( + new UpdateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString()))); @Test public void testGetSuccess() throws Exception { @@ -72,25 +75,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), 
TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).updateQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN_2) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateQuery( + any(), + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq( + new QueryStatement() + 
.setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2))), + Mockito.anyLong()); } @Test @@ -108,23 +121,23 @@ public void testGetUnauthorizedNoEditQueriesRights() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetQueryServiceException() throws Exception { // Update resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateQuery( + any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.anyLong()); UpdateQueryResolver resolver = new UpdateQueryResolver(mockService); @@ -143,56 +156,56 @@ private QueryService initMockService() { // Pre-Update QueryService service = Mockito.mock(QueryService.class); - final QuerySubjects existingSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); + final QuerySubjects existingSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(service.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) + 
Mockito.when(service.getQuerySubjects(any(), Mockito.eq(TEST_QUERY_URN))) .thenReturn(existingSubjects); // Post-Update - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects newSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN_2) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(newSubjects.data())) - ))) - ); + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + 
.setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects newSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)))); + + Mockito.when(service.getQueryEntityResponse(any(), Mockito.eq(TEST_QUERY_URN))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(newSubjects.data())))))); return service; } @@ -202,62 +215,71 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editQueriesRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - 
); - - AuthorizationRequest editAllRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); + AuthorizationRequest editQueriesRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editQueriesRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); + + AuthorizationRequest editAllRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); AuthorizationResult editQueriesResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))).thenReturn(editQueriesResult1); + Mockito.when(editQueriesResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))) + .thenReturn(editQueriesResult1); AuthorizationResult editAllResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest1))).thenReturn(editAllResult1); AuthorizationResult editQueriesResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))).thenReturn(editQueriesResult2); + Mockito.when(editQueriesResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))) + .thenReturn(editQueriesResult2); AuthorizationResult editAllResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest2))).thenReturn(editAllResult2); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java index 3cde81d7a7f31d..bda1a7eec1920c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AcceptRoleResolverTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -54,7 +53,7 @@ public void testInvalidInviteToken() throws Exception { QueryContext mockContext = getMockAllowContext(); 
when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(false); + when(_inviteTokenService.isInviteTokenValid(any(), eq(inviteTokenUrn))).thenReturn(false); AcceptRoleInput input = new AcceptRoleInput(); input.setInviteToken(INVITE_TOKEN_STRING); @@ -69,8 +68,8 @@ public void testNoRoleUrn() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(null); + when(_inviteTokenService.isInviteTokenValid(any(), eq(inviteTokenUrn))).thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(any(), eq(inviteTokenUrn))).thenReturn(null); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); @@ -89,8 +88,8 @@ public void testAssignRolePasses() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(roleUrn); + when(_inviteTokenService.isInviteTokenValid(any(), eq(inviteTokenUrn))).thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(any(), eq(inviteTokenUrn))).thenReturn(roleUrn); Actor actor = mock(Actor.class); 
when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java index 85891dbd96fb08..6411728552a1ec 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.google.common.collect.ImmutableList; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchAssignRoleResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 8d8faf5c3f12eb..e9d5ef00e74dd7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.role; 
+import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateInviteTokenInput; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +43,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(OperationContext.class), any(), eq(true))) + .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java index 
ef426979953d06..78d848e882b6bf 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GetInviteTokenInput; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class GetInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -20,13 +20,15 @@ public class GetInviteTokenResolverTest { private GetInviteTokenResolver _resolver; private DataFetchingEnvironment _dataFetchingEnvironment; private Authentication _authentication; + private OperationContext opContext; @BeforeMethod public void setupTest() throws Exception { _inviteTokenService = mock(InviteTokenService.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); _authentication = mock(Authentication.class); - + opContext = mock(OperationContext.class); + when(opContext.getAuthentication()).thenReturn(_authentication); _resolver = new GetInviteTokenResolver(_inviteTokenService); } @@ -43,12 +45,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); 
when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(OperationContext.class), any(), eq(false))) + .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/IsAssignedToMeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/IsAssignedToMeResolverTest.java new file mode 100644 index 00000000000000..66e42e8439ccc3 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/IsAssignedToMeResolverTest.java @@ -0,0 +1,81 @@ +package com.linkedin.datahub.graphql.resolvers.role; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.*; +import com.linkedin.datahub.graphql.resolvers.dataset.IsAssignedToMeResolver; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IsAssignedToMeResolverTest { + + private static final Urn TEST_CORP_USER_URN_1 = UrnUtils.getUrn("urn:li:corpuser:test-user-1"); + private static final Urn 
TEST_CORP_USER_URN_2 = UrnUtils.getUrn("urn:li:corpuser:test-user-2"); + private static final Urn TEST_CORP_USER_URN_3 = UrnUtils.getUrn("urn:li:corpuser:test-user-3"); + + @Test + public void testReturnsTrueIfCurrentUserIsAssignedToRole() throws Exception { + + CorpUser corpUser1 = new CorpUser(); + corpUser1.setUrn(TEST_CORP_USER_URN_1.toString()); + CorpUser corpUser2 = new CorpUser(); + corpUser2.setUrn(TEST_CORP_USER_URN_2.toString()); + CorpUser corpUser3 = new CorpUser(); + corpUser3.setUrn(TEST_CORP_USER_URN_3.toString()); + + ArrayList roleUsers = new ArrayList<>(); + roleUsers.add(new RoleUser(corpUser1)); + roleUsers.add(new RoleUser(corpUser2)); + roleUsers.add(new RoleUser(corpUser3)); + + Actor actor = new Actor(); + actor.setUsers(roleUsers); + Role role = new Role(); + role.setUrn("urn:li:role:fake-role"); + role.setActors(actor); + + QueryContext mockContext = getMockAllowContext(TEST_CORP_USER_URN_1.toString()); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(role); + + IsAssignedToMeResolver resolver = new IsAssignedToMeResolver(); + assertTrue(resolver.get(mockEnv).get()); + } + + @Test + public void testReturnsFalseIfCurrentUserIsNotAssignedToRole() throws Exception { + + CorpUser corpUser1 = new CorpUser(); + corpUser1.setUrn(TEST_CORP_USER_URN_1.toString()); + CorpUser corpUser2 = new CorpUser(); + corpUser2.setUrn(TEST_CORP_USER_URN_2.toString()); + CorpUser corpUser3 = new CorpUser(); + corpUser3.setUrn(TEST_CORP_USER_URN_3.toString()); + + ArrayList roleUsers = new ArrayList<>(); + roleUsers.add(new RoleUser(corpUser2)); + roleUsers.add(new RoleUser(corpUser3)); + + Actor actor = new Actor(); + actor.setUsers(roleUsers); + Role role = new Role(); + role.setUrn("urn:li:role:fake-role"); + role.setActors(actor); + + QueryContext mockContext = getMockAllowContext(TEST_CORP_USER_URN_1.toString()); + 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Mockito.when(mockEnv.getSource()).thenReturn(role); + + IsAssignedToMeResolver resolver = new IsAssignedToMeResolver(); + assertFalse(resolver.get(mockEnv).get()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index 4a0b062c67ffd2..7a3189338d6620 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.anyInt; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -12,7 +19,6 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -20,18 +26,9 @@ import com.linkedin.policy.DataHubRoleInfo; import graphql.schema.DataFetchingEnvironment; import java.util.Map; -import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static 
com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListRolesResolverTest { private static final String ADMIN_ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String EDITOR_ROLE_URN_STRING = "urn:li:dataHubRole:Editor"; @@ -47,8 +44,11 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(roleUrn.toString()); dataHubRoleInfo.setName(roleUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return entityResponse; } @@ -57,8 +57,12 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { public void setupTest() throws Exception { Urn adminRoleUrn = Urn.createFromString(ADMIN_ROLE_URN_STRING); Urn editorRoleUrn = Urn.createFromString(EDITOR_ROLE_URN_STRING); - _entityResponseMap = ImmutableMap.of(adminRoleUrn, getMockRoleEntityResponse(adminRoleUrn), editorRoleUrn, - getMockRoleEntityResponse(editorRoleUrn)); + _entityResponseMap = + ImmutableMap.of( + adminRoleUrn, + getMockRoleEntityResponse(adminRoleUrn), + editorRoleUrn, + getMockRoleEntityResponse(editorRoleUrn)); _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); @@ -84,14 +88,22 @@ public void testListRoles() throws Exception { ListRolesInput input = new ListRolesInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new 
SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(2); - roleSearchResult.setEntities(new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), - new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); - - when(_entityClient.search(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt(), any(), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(2); + roleSearchResult.setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), + new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); + + when(_entityClient.search( + any(), eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt())) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(any(), eq(DATAHUB_ROLE_ENTITY_NAME), any(), any())) + .thenReturn(_entityResponseMap); ListRolesResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c161a66d3ee933..d32eb9fcf120ca 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; 
+import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static org.mockito.ArgumentMatchers.any; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -12,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; @@ -24,67 +27,57 @@ import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class AggregateAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final 
Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + FormService mockFormService = Mockito.mock(FormService.class); List facets = ImmutableList.of("platform", "domains"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -94,18 +87,16 @@ public static void 
testApplyViewNullBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", viewFilter, // Verify that view filter was used. 0, 0, facets // Verify called with facets we provide - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -113,42 +104,45 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + FormService mockFormService = Mockito.mock(FormService.class); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 0, + null, + new SearchResult() + 
.setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -158,18 +152,15 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 
0, 0, - null - ); + null); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -178,36 +169,29 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of("platform"); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - null, - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + FormService mockFormService = Mockito.mock(FormService.class); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -218,18 +202,17 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { 
verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, facets // Verify facets passed in were used - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -238,36 +221,30 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of(); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + FormService mockFormService = Mockito.mock(FormService.class); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, 
mockService, mockFormService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -278,55 +255,51 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, null // Verify that an empty list for facets in input sends null - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + FormService mockFormService = Mockito.mock(FormService.class); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -334,45 +307,32 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 0, - 
null - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 0, null); } @Test public static void testErrorFetchingResults() throws Exception { - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + FormService mockFormService = Mockito.mock(FormService.class); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + any(), + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(Collections.emptyList()), + Mockito.eq(null))) + .thenThrow(new RemoteInvocationException()); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService, mockFormService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -383,17 +343,18 @@ public static void testErrorFetchingResults() throws Exception { private static Filter createFilter(String field, String value) { return new Filter() - .setOr(new ConjunctiveCriterionArray( - new 
ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField(field) - .setValue(value) - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of(value))) - )) - ))); + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(field) + .setValue(value) + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues(new StringArray(ImmutableList.of(value)))))))); } private static DataHubViewInfo getViewInfo(Filter viewFilter) { @@ -402,24 +363,19 @@ private static DataHubViewInfo getViewInfo(Filter viewFilter) { info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); return info; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(any(), Mockito.eq(viewUrn))).thenReturn(viewInfo); return service; } @@ -430,22 +386,20 @@ private static EntityClient initMockEntityClient( int start, int limit, List facets, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = 
Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), - Mockito.eq(facets) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + any(), + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(Collections.emptyList()), + Mockito.eq(facets))) + .thenReturn(result); return client; } @@ -456,33 +410,23 @@ private static void verifyMockEntityClient( Filter filter, int start, int limit, - List facets - ) throws Exception { + List facets) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), - Mockito.eq(facets) - ); + Mockito.eq(Collections.emptyList()), + Mockito.eq(facets)); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { - Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(viewUrn)); } - private AggregateAcrossEntitiesResolverTest() { } - + private AggregateAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index 7397ea8fa21cfc..45190bbfc93e5c 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -1,6 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static org.mockito.ArgumentMatchers.any; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -32,14 +34,12 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class AutoCompleteForMultipleResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private AutoCompleteForMultipleResolverTest() { } + private AutoCompleteForMultipleResolverTest() {} public static void testAutoCompleteResolverSuccess( EntityClient mockClient, @@ -48,9 +48,10 @@ public static void testAutoCompleteResolverSuccess( EntityType entityType, SearchableEntityType entity, Urn viewUrn, - Filter filter - ) throws Exception { - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); + Filter filter) + throws Exception { + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -65,57 +66,73 @@ public static void testAutoCompleteResolverSuccess( Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - entityName, - 
"test", - filter, - 10 - ); + verifyMockEntityClient(mockClient, entityName, "test", filter, 10); } // test our main entity types @Test public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception { ViewService viewService = initMockViewService(null, null); + // Daatasets - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATASET_ENTITY_NAME, EntityType.DATASET, new DatasetType(mockClient), null, null); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATASET_ENTITY_NAME, + EntityType.DATASET, + new DatasetType(mockClient), + null, + null); // Dashboards - mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DASHBOARD_ENTITY_NAME, EntityType.DASHBOARD, new DashboardType(mockClient), null, null); + mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DASHBOARD_ENTITY_NAME, + EntityType.DASHBOARD, + new DashboardType(mockClient), + null, + null); - //DataFlows - mockClient = initMockEntityClient( - Constants.DATA_FLOW_ENTITY_NAME, - 
"test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATA_FLOW_ENTITY_NAME, EntityType.DATA_FLOW, new DataFlowType(mockClient), null, null); + // DataFlows + mockClient = + initMockEntityClient( + Constants.DATA_FLOW_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATA_FLOW_ENTITY_NAME, + EntityType.DATA_FLOW, + new DataFlowType(mockClient), + null, + null); } // test filters with a given view @@ -123,16 +140,17 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); testAutoCompleteResolverSuccess( mockClient, viewService, @@ -140,8 +158,7 @@ public static void testAutoCompleteResolverWithViewFilter() throws Exception { EntityType.DATASET, new DatasetType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); } // test entity type filters with a given view @@ -152,16 +169,16 @@ public static void 
testAutoCompleteResolverWithViewEntityFilter() throws Excepti entityNames.add(Constants.DASHBOARD_ENTITY_NAME); DataHubViewInfo viewInfo = createViewInfo(entityNames); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); // ensure we do hit the entity client for dashboards since dashboards are in our view testAutoCompleteResolverSuccess( @@ -171,25 +188,27 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti EntityType.DASHBOARD, new DashboardType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); - // if the view has only dashboards, we should not make an auto-complete request on other entity types + // if the view has only dashboards, we should not make an auto-complete request on other entity + // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( + any(), Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), - Mockito.eq(10), - Mockito.any(Authentication.class) - ); + Mockito.eq(10)); } @Test public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(new DatasetType(mockClient)), viewService); + + final AutoCompleteForMultipleResolver resolver = + new 
AutoCompleteForMultipleResolver( + ImmutableList.of(new DatasetType(mockClient)), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -204,75 +223,59 @@ public static void testAutoCompleteResolverFailNoQuery() throws Exception { } private static EntityClient initMockEntityClient( - String entityName, - String query, - Filter filters, - int limit, - AutoCompleteResult result - ) throws Exception { + String entityName, String query, Filter filters, int limit, AutoCompleteResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.autoComplete( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn(result); + Mockito.when( + client.autoComplete( + any(), + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filters), + Mockito.eq(limit))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(any(), Mockito.eq(viewUrn))).thenReturn(viewInfo); return service; } - + private static void verifyMockEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filters, - int limit - ) throws Exception { + EntityClient mockClient, String entityName, String query, Filter filters, int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( + any(), Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class) - ); + Mockito.eq(limit)); } 
private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index a599117c3e165a..64042e82bbfe88 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -1,13 +1,16 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static org.mockito.ArgumentMatchers.any; + import 
com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput; import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult; import com.linkedin.datahub.graphql.generated.QuickFilter; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.AggregationMetadata; @@ -20,20 +23,17 @@ import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class GetQuickFiltersResolverTest { @@ -41,19 +41,21 @@ public class GetQuickFiltersResolverTest { public static void testGetQuickFiltersHappyPathSuccess() throws Exception { SearchResultMetadata mockData = getHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - 
.setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -72,19 +74,21 @@ public static void testGetQuickFiltersHappyPathSuccess() throws Exception { public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { SearchResultMetadata mockData = getUnHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -103,16 +107,17 @@ public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { public static void testGetQuickFiltersFailure() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - 
Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.searchAcrossEntities( + any(), + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(Collections.emptyList()), + Mockito.eq(null))) + .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -124,26 +129,36 @@ public static void testGetQuickFiltersFailure() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static void compareResultToExpectedData(GetQuickFiltersResult result, GetQuickFiltersResult expected) { - IntStream.range(0, result.getQuickFilters().size()).forEach(index -> { - QuickFilter resultFilter = result.getQuickFilters().get(index); - QuickFilter expectedFilter = expected.getQuickFilters().get(index); - Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); - Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); - if (resultFilter.getEntity() != null) { - Assert.assertEquals(resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); - } - }); + private static void compareResultToExpectedData( + GetQuickFiltersResult result, GetQuickFiltersResult expected) { + IntStream.range(0, result.getQuickFilters().size()) + .forEach( + index -> { + QuickFilter resultFilter = result.getQuickFilters().get(index); + QuickFilter expectedFilter = expected.getQuickFilters().get(index); + Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); + Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); + if (resultFilter.getEntity() != null) { + Assert.assertEquals( + resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); + } + }); } private static SearchResultMetadata getHappyPathTestData() { FilterValueArray 
platformFilterValues = new FilterValueArray(); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); FilterValueArray entityTypeFilters = new FilterValueArray(); entityTypeFilters.add(createFilterValue("dataset", 100, null)); @@ -168,11 +183,18 @@ private static GetQuickFiltersResult getHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List quickFilters = new ArrayList<>(); // platforms should be in alphabetical order - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); - 
quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DASHBOARD", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_FLOW", null)); @@ -186,9 +208,12 @@ private static GetQuickFiltersResult getHappyPathResultData() { private static SearchResultMetadata getUnHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); // only 3 platforms available - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + 
createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); FilterValueArray entityTypeFilters = new FilterValueArray(); // no dashboard, data flows, or glossary terms @@ -210,10 +235,15 @@ private static SearchResultMetadata getUnHappyPathTestData() { private static GetQuickFiltersResult getUnHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List quickFilters = new ArrayList<>(); - // in correct order by count for platforms (alphabetical). In correct order by priority for entity types - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + // in correct order by count for platforms (alphabetical). In correct order by priority for + // entity types + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_JOB", null)); quickFilters.add(createQuickFilter("_entityType", "CHART", null)); @@ -224,17 +254,19 @@ private static GetQuickFiltersResult getUnHappyPathResultData() { return result; } - private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { + private static QuickFilter createQuickFilter( + @Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { QuickFilter quickFilter = new QuickFilter(); 
quickFilter.setField(field); quickFilter.setValue(value); if (entityUrn != null) { - quickFilter.setEntity(UrnToEntityMapper.map(UrnUtils.getUrn(entityUrn))); + quickFilter.setEntity(UrnToEntityMapper.map(null, UrnUtils.getUrn(entityUrn))); } return quickFilter; } - private static FilterValue createFilterValue(@Nonnull final String value, final int count, @Nullable final String entity) { + private static FilterValue createFilterValue( + @Nonnull final String value, final int count, @Nullable final String entity) { FilterValue filterValue = new FilterValue(); filterValue.setValue(value); filterValue.setFacetCount(count); @@ -244,7 +276,8 @@ private static FilterValue createFilterValue(@Nonnull final String value, final return filterValue; } - private static AggregationMetadata createAggregationMetadata(@Nonnull final String name, @Nonnull final FilterValueArray filterValues) { + private static AggregationMetadata createAggregationMetadata( + @Nonnull final String name, @Nonnull final FilterValueArray filterValues) { AggregationMetadata aggregationMetadata = new AggregationMetadata(); aggregationMetadata.setName(name); aggregationMetadata.setFilterValues(filterValues); @@ -257,24 +290,22 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + any(), + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(Collections.emptyList()), + Mockito.eq(null))) + .thenReturn(result); return client; } - private 
GetQuickFiltersResolverTest() { } - + private GetQuickFiltersResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index b0a681c9b23423..30d6f2dc6f2836 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.mockito.ArgumentMatchers.any; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -12,7 +15,7 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; -import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; +import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; @@ -38,167 +41,172 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - public class SearchAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test 
public static void testApplyViewNullBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new 
SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockEntityClient( + mockClient, + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", - viewFilter, + viewFilter, // Verify that view filter was used. 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockEntityClient( - mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. - "", - viewFilter, // Verify that view filter was used. 
- 0, - 10 - ); - - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + 10); + + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("baseField.keyword") - .setValue("baseTest") - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("baseTest"))) - )) - ))); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - 
final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("baseField.keyword") + .setValue("baseTest") + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues( + new StringArray(ImmutableList.of("baseTest")))))))); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -208,74 +216,66 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewNullBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, 
Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - null, - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + null, "", 0, 10, null, null, TEST_VIEW_URN.toString(), null, null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -285,74 +285,75 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. 
"", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewEmptyBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - 
null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -362,56 +363,55 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -419,49 +419,41 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 10 - ); + 
verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 10); } @Test public static void testApplyViewErrorFetchingView() throws Exception { // When a view cannot be successfully resolved, the endpoint show THROW. - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + any(), + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(Collections.emptyList()), + Mockito.eq(Collections.emptyList()))) + .thenThrow(new RemoteInvocationException()); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -470,17 +462,9 @@ public static void testApplyViewErrorFetchingView() throws Exception { Assert.assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(any(), Mockito.eq(viewUrn))).thenReturn(viewInfo); return service; } @@ -490,21 +474,19 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + any(), + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(Collections.emptyList()))) + .thenReturn(result); return client; } @@ -514,32 +496,22 @@ private static void verifyMockEntityClient( String query, Filter filter, int start, - int limit - ) throws Exception { + int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( + any(), Mockito.eq(entityTypes), Mockito.eq(query), Mockito.eq(filter), Mockito.eq(start), Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - ); + Mockito.eq(Collections.emptyList())); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { - Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + private static void 
verifyMockViewService(ViewService mockService, Urn viewUrn) { + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(viewUrn)); } - private SearchAcrossEntitiesResolverTest() { } - + private SearchAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index c68b621e6921f2..153e98149ff1a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.UrnUtils; @@ -10,7 +14,8 @@ import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.AggregationMetadataArray; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchEntityArray; @@ -18,20 +23,20 @@ import com.linkedin.metadata.search.MatchedFieldArray; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.io.InputStream; import 
java.util.Collections; import java.util.List; +import org.mockito.ArgumentCaptor; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - // Initialize this class in the style of SearchAcrossEntitiesResolverTest.java public class SearchAcrossLineageResolverTest { - private static final String SOURCE_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; - private static final String TARGET_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; + private static final String SOURCE_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; + private static final String TARGET_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; private static final String QUERY = ""; private static final int START = 0; private static final int COUNT = 10; @@ -42,13 +47,28 @@ public class SearchAcrossLineageResolverTest { private Authentication _authentication; private SearchAcrossLineageResolver _resolver; + private EntityRegistry _entityRegistry; + @BeforeMethod public void setupTest() { _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); _authentication = mock(Authentication.class); - _resolver = new SearchAcrossLineageResolver(_entityClient); + _entityRegistry = mock(EntityRegistry.class); + _resolver = new SearchAcrossLineageResolver(_entityClient, _entityRegistry); + } + + @Test + public void testAllEntitiesInitialization() { + InputStream inputStream = ClassLoader.getSystemResourceAsStream("entity-registry.yml"); + EntityRegistry entityRegistry = new ConfigEntityRegistry(inputStream); + SearchAcrossLineageResolver resolver = + new SearchAcrossLineageResolver(_entityClient, entityRegistry); + assertTrue(resolver._allEntities.contains("dataset")); + assertTrue(resolver._allEntities.contains("dataFlow")); + // Test for case 
sensitivity + assertFalse(resolver._allEntities.contains("dataflow")); } @Test @@ -85,25 +105,30 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchEntity.setMatchedFields(new MatchedFieldArray()); lineageSearchEntity.setPaths(new UrnArrayArray()); lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); + ArgumentCaptor opContext = ArgumentCaptor.forClass(OperationContext.class); when(_entityClient.searchAcrossLineage( - eq(UrnUtils.getUrn(SOURCE_URN_STRING)), - eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), - anyList(), - eq(QUERY), - eq(null), - any(), - eq(null), - eq(START), - eq(COUNT), - eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))).thenReturn(lineageSearchResult); + opContext.capture(), + eq(UrnUtils.getUrn(SOURCE_URN_STRING)), + eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), + anyList(), + eq(QUERY), + eq(null), + any(), + eq(null), + eq(START), + eq(COUNT))) + .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(results.getCount(), 10); assertEquals(results.getTotal(), 1); + assertEquals( + opContext.getValue().getSearchContext().getLineageFlags().getStartTimeMillis(), + START_TIMESTAMP_MILLIS); + assertEquals( + opContext.getValue().getSearchContext().getLineageFlags().getEndTimeMillis(), + END_TIMESTAMP_MILLIS); final List entities = results.getSearchResults(); assertEquals(entities.size(), 1); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 6ba8b3cefe5046..fbbf5cf314eda3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,190 +1,192 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; + +import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.query.GroupingCriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; +import java.util.List; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - - public class SearchResolverTest { - @Test - public void testDefaultSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - 
verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "", - null, - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - @Test - public void testOverrideSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchFlags inputSearchFlags = new SearchFlags(); - inputSearchFlags.setFulltext(false); - inputSearchFlags.setSkipAggregates(true); - inputSearchFlags.setSkipHighlighting(true); - inputSearchFlags.setMaxAggValues(10); - inputSearchFlags.setSkipCache(true); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 1, - 11, - null, - null, - inputSearchFlags - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
- "", - null, - null, - 1, - 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true) - ); - } - @Test - public void testNonWildCardSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "not a wildcard", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "not a wildcard", - null, // Verify that view filter was used. 
- null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); + private com.linkedin.metadata.query.SearchFlags setConvertSchemaFieldsToDatasets( + com.linkedin.metadata.query.SearchFlags flags, boolean value) { + if (value) { + return flags.setGroupingSpec( + new com.linkedin.metadata.query.GroupingSpec() + .setGroupingCriteria( + new GroupingCriterionArray( + new com.linkedin.metadata.query.GroupingCriterion() + .setBaseEntityType(SCHEMA_FIELD_ENTITY_NAME) + .setGroupingEntityType(DATASET_ENTITY_NAME)))); + } else { + return flags.setGroupingSpec(null, SetMode.REMOVE_IF_NULL); } - - private EntityClient initMockSearchEntityClient() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.search( + } + + @Test + public void testDefaultSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = new SearchInput(EntityType.DATASET, "", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + Collections.emptyList(), + 0, + 10, + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), + true)); + } + + @Test + public void testOverrideSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchFlags inputSearchFlags = new SearchFlags(); + inputSearchFlags.setFulltext(false); + inputSearchFlags.setSkipAggregates(true); + inputSearchFlags.setSkipHighlighting(true); + inputSearchFlags.setMaxAggValues(10); + inputSearchFlags.setSkipCache(true); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "", 1, 11, null, null, inputSearchFlags); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + Collections.emptyList(), + 1, + 11, + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true), + false)); + } + + @Test + public void testNonWildCardSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "not a wildcard", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "not a wildcard", + null, // Verify that view filter was used. 
+ Collections.emptyList(), + 0, + 10, + setConvertSchemaFieldsToDatasets( + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false) + .setIncludeSoftDeleted(false) + .setIncludeRestricted(false), + true)); + } + + private EntityClient initMockSearchEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + Mockito.when( + client.search( + any(), Mockito.anyString(), Mockito.anyString(), Mockito.any(), Mockito.any(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.any() - )).thenReturn( - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - return client; - } - - private void verifyMockSearchEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filter, - SortCriterion sortCriterion, - int start, - int limit, - com.linkedin.metadata.query.SearchFlags searchFlags - ) throws Exception { - Mockito.verify(mockClient, Mockito.times(1)).search( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(sortCriterion), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags) - ); - } - - private SearchResolverTest() { - } + Mockito.anyInt())) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + return client; + } + + private void verifyMockSearchEntityClient( + EntityClient mockClient, + String entityName, + String query, + Filter filter, + List sortCriteria, + int start, + int limit, + com.linkedin.metadata.query.SearchFlags searchFlags) + throws Exception { + Mockito.verify(mockClient, Mockito.times(1)) + .search( + any(), + 
Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(sortCriteria), + Mockito.eq(start), + Mockito.eq(limit)); + } + + private SearchResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java index b35f7a77b209c9..8f23f0a6245766 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java @@ -17,16 +17,18 @@ public class SearchUtilsTest { @Test public static void testApplyViewToFilterNullBaseFilter() { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(null, viewFilter); Assert.assertEquals(viewFilter, result); @@ -34,275 +36,272 @@ public static void testApplyViewToFilterNullBaseFilter() { @Test public static void testApplyViewToFilterComplexBaseFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - 
)) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new 
StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterComplexViewFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new 
CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new 
StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - 
.setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + 
.setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray(ImmutableList.of("viewTest4")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + 
ImmutableList.of("viewTest4"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterV1Filter() { - Filter baseFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ); - - Filter viewFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ); - - Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( + Filter baseFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("field1") .setValue("test1") @@ -310,7 +309,13 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("field2") .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), + .setValues(new StringArray(ImmutableList.of("test2")))))); + + Filter viewFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("viewField1") .setValue("viewTest1") @@ -318,10 +323,38 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("viewField2") .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ) - ))); + .setValues(new StringArray(ImmutableList.of("viewTest2")))))); + + Filter result = SearchUtils.combineFilters(baseFilter, 
viewFilter); + + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray( + ImmutableList.of("viewTest2"))))))))); Assert.assertEquals(expectedResult, result); } @@ -329,24 +362,17 @@ public static void testApplyViewToFilterV1Filter() { @Test public static void testApplyViewToEntityTypes() { - List baseEntityTypes = ImmutableList.of( - Constants.CHART_ENTITY_NAME, - Constants.DATASET_ENTITY_NAME - ); + List baseEntityTypes = + ImmutableList.of(Constants.CHART_ENTITY_NAME, Constants.DATASET_ENTITY_NAME); - List viewEntityTypes = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME - ); + List viewEntityTypes = + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME); final List result = SearchUtils.intersectEntityTypes(baseEntityTypes, viewEntityTypes); - final List expectedResult = ImmutableList.of( - Constants.DATASET_ENTITY_NAME - ); + final List expectedResult = ImmutableList.of(Constants.DATASET_ENTITY_NAME); Assert.assertEquals(expectedResult, result); } - private SearchUtilsTest() { } - + private SearchUtilsTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java index 
905e913fba909c..6c9d259230f1d4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -15,29 +18,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateCorpUserViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = new UpdateCorpUserViewsSettingsInput( - TEST_URN.toString() - ); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = new UpdateCorpUserViewsSettingsInput( - null - ); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = + new UpdateCorpUserViewsSettingsInput(TEST_URN.toString()); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = + new UpdateCorpUserViewsSettingsInput(null); @Test public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - ); - UpdateCorpUserViewsSettingsResolver resolver = new 
UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -46,25 +44,28 @@ public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + any(), + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN)))); } @Test public void testGetSuccessViewSettingsExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + 
.setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -73,26 +74,28 @@ public void testGetSuccessViewSettingsExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + any(), + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN)))); } - @Test public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext 
= getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -101,22 +104,26 @@ public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + any(), + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL)))); } @Test public void testGetCorpUserSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getCorpUserSettings(any(), Mockito.eq(TEST_USER_URN)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -126,19 +133,16 @@ public void testGetCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateCorpUserSettingsException() throws Exception { - SettingsService mockService = 
initSettingsService( - TEST_USER_URN, - null - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(CorpUserSettings.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(TEST_USER_URN, null); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateCorpUserSettings( + any(), Mockito.eq(TEST_USER_URN), Mockito.any(CorpUserSettings.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -148,17 +152,12 @@ public void testUpdateCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - Urn user, - CorpUserSettings existingSettings - ) { + private static SettingsService initSettingsService(Urn user, CorpUserSettings existingSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getCorpUserSettings( - Mockito.eq(user), - Mockito.any(Authentication.class))) + Mockito.when(mockService.getCorpUserSettings(any(), Mockito.eq(user))) .thenReturn(existingSettings); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java index 4e2283735b8c97..bd528020e88901 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -14,10 +17,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class GlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -25,9 +24,7 @@ public class GlobalViewsSettingsResolverTest { @Test public void testGetSuccessNullSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -42,9 +39,7 @@ public void testGetSuccessNullSettings() throws Exception { @Test public void testGetSuccessEmptySettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -53,16 +48,13 @@ public void testGetSuccessEmptySettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertNull( - result.getDefaultView() - ); + Assert.assertNull(result.getDefaultView()); } @Test 
public void testGetSuccessExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(TEST_URN) - ); + SettingsService mockService = + initSettingsService(new GlobalViewsSettings().setDefaultView(TEST_URN)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -71,17 +63,13 @@ public void testGetSuccessExistingSettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertEquals( - result.getDefaultView(), - TEST_URN.toString() - ); + Assert.assertEquals(result.getDefaultView(), TEST_URN.toString()); } @Test public void testGetException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings(any()); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); @@ -94,9 +82,7 @@ public void testGetException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -107,15 +93,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = 
Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - Mockito.any(Authentication.class))) + Mockito.when(mockService.getGlobalSettings(any())) .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java index 9ea3c223559cd2..d673806b5724e7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -14,22 +17,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateGlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); - private static final UpdateGlobalViewsSettingsInput TEST_INPUT = new UpdateGlobalViewsSettingsInput( - TEST_URN.toString() - ); + private static final UpdateGlobalViewsSettingsInput TEST_INPUT = + new UpdateGlobalViewsSettingsInput(TEST_URN.toString()); @Test public void testGetSuccessNoExistingSettings() throws Exception { - SettingsService mockService = 
initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -39,16 +35,17 @@ public void testGetSuccessNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + any(), + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN)))); } @Test public void testGetSuccessNoDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -58,18 +55,20 @@ public void testGetSuccessNoDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + any(), + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN)))); } @Test public void testGetSuccessExistingDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - )) - ); + SettingsService mockService = + initSettingsService( + new GlobalViewsSettings() + 
.setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView"))); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -79,16 +78,18 @@ public void testGetSuccessExistingDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + any(), + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN)))); } @Test public void testGetGlobalViewsSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings(any()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -100,15 +101,12 @@ public void testGetGlobalViewsSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateGlobalViewsSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateGlobalSettings( - Mockito.any(GlobalSettingsInfo.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateGlobalSettings(any(), Mockito.any(GlobalSettingsInfo.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ 
-122,11 +120,11 @@ public void testUpdateGlobalViewsSettingsException() throws Exception { @Test public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - null // Should never be null. - ); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + SettingsService mockService = + initSettingsService( + null // Should never be null. + ); + Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings(any()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -140,9 +138,7 @@ public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -154,15 +150,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - Mockito.any(Authentication.class))) - .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); + Mockito.when(mockService.getGlobalSettings(any())) + .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java index 8c4445452c5647..c7440161005b41 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -21,12 +26,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchGetStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final long TIME = 123L; @@ -35,7 +34,8 @@ public class BatchGetStepStatesResolverTest { private static final String SECOND_STEP_STATE_ID = "2"; private static final Urn FIRST_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:1"); private static final Urn SECOND_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:2"); - private static final Set ASPECTS = ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME); + private static final Set ASPECTS = + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME); private EntityClient _entityClient; private BatchGetStepStatesResolver _resolver; private DataFetchingEnvironment 
_dataFetchingEnvironment; @@ -61,22 +61,23 @@ public void testBatchGetStepStatesFirstStepCompleted() throws Exception { input.setIds(ImmutableList.of(FIRST_STEP_STATE_ID, SECOND_STEP_STATE_ID)); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); - when(_entityClient.exists(eq(FIRST_STEP_STATE_URN), eq(_authentication))).thenReturn(true); - when(_entityClient.exists(eq(SECOND_STEP_STATE_URN), eq(_authentication))).thenReturn(false); + when(_entityClient.exists(any(), eq(FIRST_STEP_STATE_URN))).thenReturn(true); + when(_entityClient.exists(any(), eq(SECOND_STEP_STATE_URN))).thenReturn(false); final DataHubStepStateProperties firstStepStateProperties = new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); final Set urns = ImmutableSet.of(FIRST_STEP_STATE_URN); - final Map firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - firstStepStateProperties); - final Map entityResponseMap = ImmutableMap.of(FIRST_STEP_STATE_URN, - TestUtils.buildEntityResponse(firstAspectMap)); + final Map firstAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties); + final Map entityResponseMap = + ImmutableMap.of(FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap)); - when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) + when(_entityClient.batchGetV2(any(), eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS))) .thenReturn(entityResponseMap); - final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchGetStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, actualBatchResult.getResults().size()); } @@ -91,8 +92,8 @@ public void testBatchGetStepStatesBothStepsCompleted() throws Exception { input.setIds(ImmutableList.of(FIRST_STEP_STATE_ID, SECOND_STEP_STATE_ID)); 
when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); - when(_entityClient.exists(eq(FIRST_STEP_STATE_URN), eq(_authentication))).thenReturn(true); - when(_entityClient.exists(eq(SECOND_STEP_STATE_URN), eq(_authentication))).thenReturn(true); + when(_entityClient.exists(any(), eq(FIRST_STEP_STATE_URN))).thenReturn(true); + when(_entityClient.exists(any(), eq(SECOND_STEP_STATE_URN))).thenReturn(true); final DataHubStepStateProperties firstStepStateProperties = new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); @@ -100,18 +101,20 @@ public void testBatchGetStepStatesBothStepsCompleted() throws Exception { new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); final Set urns = ImmutableSet.of(FIRST_STEP_STATE_URN, SECOND_STEP_STATE_URN); - final Map firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - firstStepStateProperties); - final Map secondAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - secondStepStateProperties); - final Map entityResponseMap = ImmutableMap.of( - FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap), - SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap)); - - when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) + final Map firstAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties); + final Map secondAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, secondStepStateProperties); + final Map entityResponseMap = + ImmutableMap.of( + FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap), + SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap)); + + when(_entityClient.batchGetV2(any(), eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS))) .thenReturn(entityResponseMap); - final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + 
final BatchGetStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(2, actualBatchResult.getResults().size()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java index 5f20a11f15ac66..67fb8cb892b6e3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -16,11 +20,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchUpdateStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final String FIRST_STEP_STATE_ID = "1"; @@ -52,9 +51,10 @@ public void testBatchUpdateStepStatesFirstStepCompleted() throws Exception { input.setStates(ImmutableList.of(firstInput)); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); - final BatchUpdateStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchUpdateStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, 
actualBatchResult.getResults().size()); - verify(_entityClient, times(1)).ingestProposal(any(), eq(_authentication), eq(false)); + verify(_entityClient, times(1)).ingestProposal(any(), any(), eq(false)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java new file mode 100644 index 00000000000000..72cdb78542e414 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/CreateStructuredPropertyResolverTest.java @@ -0,0 +1,126 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreateStructuredPropertyInput; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class CreateStructuredPropertyResolverTest { + private static final String TEST_STRUCTURED_PROPERTY_URN = "urn:li:structuredProperty:1"; + + private static final 
CreateStructuredPropertyInput TEST_INPUT = + new CreateStructuredPropertyInput( + null, + "io.acryl.test", + "Display Name", + "description", + true, + null, + null, + null, + null, + new ArrayList<>()); + + @Test + public void testGetSuccess() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + CreateStructuredPropertyResolver resolver = + new CreateStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + StructuredPropertyEntity prop = resolver.get(mockEnv).get(); + + assertEquals(prop.getUrn(), TEST_STRUCTURED_PROPERTY_URN); + + // Validate that we called ingest + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetUnauthorized() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + CreateStructuredPropertyResolver resolver = + new CreateStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call ingest + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetFailure() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false); + CreateStructuredPropertyResolver resolver = + new 
CreateStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that ingest was called, but that caused a failure + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + EntityResponse response = new EntityResponse(); + response.setEntityName(Constants.STRUCTURED_PROPERTY_ENTITY_NAME); + response.setUrn(UrnUtils.getUrn(TEST_STRUCTURED_PROPERTY_URN)); + response.setAspects(new EnvelopedAspectMap()); + if (shouldSucceed) { + Mockito.when( + client.getV2( + any(), + Mockito.eq(Constants.STRUCTURED_PROPERTY_ENTITY_NAME), + any(), + Mockito.eq(null))) + .thenReturn(response); + } else { + Mockito.when( + client.getV2( + any(), + Mockito.eq(Constants.STRUCTURED_PROPERTY_ENTITY_NAME), + any(), + Mockito.eq(null))) + .thenThrow(new RemoteInvocationException()); + } + + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolverTest.java new file mode 100644 index 00000000000000..f7882bb2c93a8a --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/RemoveStructuredPropertiesResolverTest.java @@ -0,0 +1,123 @@ +package 
com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertThrows; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.RemoveStructuredPropertiesInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class RemoveStructuredPropertiesResolverTest { + + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String PROPERTY_URN_1 = "urn:li:structuredProperty:test1"; + private static final String PROPERTY_URN_2 = "urn:li:structuredProperty:test2"; + + private static final RemoveStructuredPropertiesInput TEST_INPUT = + new RemoveStructuredPropertiesInput( + TEST_DATASET_URN, ImmutableList.of(PROPERTY_URN_1, PROPERTY_URN_2)); + + @Test + public void testGetSuccess() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(); + RemoveStructuredPropertiesResolver resolver = + new RemoveStructuredPropertiesResolver(mockEntityClient); 
+ + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + // Validate that we called ingest + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetUnauthorized() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(); + RemoveStructuredPropertiesResolver resolver = + new RemoveStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call ingest + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetThrowsError() throws Exception { + // if the entity you are trying to remove properties from doesn't exist + EntityClient mockEntityClient = Mockito.mock(EntityClient.class); + Mockito.when(mockEntityClient.exists(any(), Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN)))) + .thenReturn(false); + RemoveStructuredPropertiesResolver resolver = + new RemoveStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call ingest + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + private EntityClient initMockEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + EntityResponse response = new EntityResponse(); + response.setEntityName(Constants.DATASET_ENTITY_NAME); + response.setUrn(UrnUtils.getUrn(TEST_DATASET_URN)); + final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + StructuredProperties properties = new StructuredProperties(); + properties.setProperties(new StructuredPropertyValueAssignmentArray()); + aspectMap.put( + STRUCTURED_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(properties.data()))); + response.setAspects(aspectMap); + Mockito.when( + client.getV2( + any(), + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN)), + Mockito.eq(ImmutableSet.of(Constants.STRUCTURED_PROPERTIES_ASPECT_NAME)))) + .thenReturn(response); + Mockito.when(client.exists(any(), Mockito.eq(UrnUtils.getUrn(TEST_DATASET_URN)))) + .thenReturn(true); + + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java new file mode 100644 index 00000000000000..971a53de9473b5 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpdateStructuredPropertyResolverTest.java @@ -0,0 +1,123 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static 
com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.StructuredPropertyEntity; +import com.linkedin.datahub.graphql.generated.UpdateStructuredPropertyInput; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.r2.RemoteInvocationException; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UpdateStructuredPropertyResolverTest { + private static final String TEST_STRUCTURED_PROPERTY_URN = "urn:li:structuredProperty:1"; + + private static final UpdateStructuredPropertyInput TEST_INPUT = + new UpdateStructuredPropertyInput( + TEST_STRUCTURED_PROPERTY_URN, + "New Display Name", + "new description", + true, + null, + null, + null, + null); + + @Test + public void testGetSuccess() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + UpdateStructuredPropertyResolver resolver = + new UpdateStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + StructuredPropertyEntity prop = resolver.get(mockEnv).get(); + + assertEquals(prop.getUrn(), TEST_STRUCTURED_PROPERTY_URN); + + // 
Validate that we called ingest + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetUnauthorized() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(true); + UpdateStructuredPropertyResolver resolver = + new UpdateStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockDenyContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that we did NOT call ingest + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetFailure() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false); + UpdateStructuredPropertyResolver resolver = + new UpdateStructuredPropertyResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + + // Validate that ingest was called, but that caused a failure + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + private EntityClient initMockEntityClient(boolean shouldSucceed) throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + EntityResponse response = new EntityResponse(); + 
response.setEntityName(Constants.STRUCTURED_PROPERTY_ENTITY_NAME); + response.setUrn(UrnUtils.getUrn(TEST_STRUCTURED_PROPERTY_URN)); + response.setAspects(new EnvelopedAspectMap()); + if (shouldSucceed) { + Mockito.when( + client.getV2( + any(), + Mockito.eq(Constants.STRUCTURED_PROPERTY_ENTITY_NAME), + any(), + Mockito.eq(null))) + .thenReturn(response); + } else { + Mockito.when( + client.getV2( + any(), + Mockito.eq(Constants.STRUCTURED_PROPERTY_ENTITY_NAME), + any(), + Mockito.eq(null))) + .thenThrow(new RemoteInvocationException()); + } + + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolverTest.java new file mode 100644 index 00000000000000..a82dffeeea00b3 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolverTest.java @@ -0,0 +1,233 @@ +package com.linkedin.datahub.graphql.resolvers.structuredproperties; + +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.PropertyValueInput; +import com.linkedin.datahub.graphql.generated.StringValue; +import com.linkedin.datahub.graphql.generated.StructuredPropertyInputParams; +import com.linkedin.datahub.graphql.generated.UpsertStructuredPropertiesInput; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import 
com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.structured.PrimitivePropertyValue; +import com.linkedin.structured.PrimitivePropertyValueArray; +import com.linkedin.structured.StructuredProperties; +import com.linkedin.structured.StructuredPropertyValueAssignment; +import com.linkedin.structured.StructuredPropertyValueAssignmentArray; +import graphql.com.google.common.collect.ImmutableList; +import graphql.com.google.common.collect.ImmutableSet; +import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.context.OperationContext; +import java.util.concurrent.CompletionException; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class UpsertStructuredPropertiesResolverTest { + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)"; + private static final String PROPERTY_URN_1 = "urn:li:structuredProperty:test1"; + private static final String PROPERTY_URN_2 = "urn:li:structuredProperty:test2"; + + private static final StructuredPropertyInputParams PROP_INPUT_1 = + new StructuredPropertyInputParams( + PROPERTY_URN_1, ImmutableList.of(new PropertyValueInput("test1", null))); + private static final StructuredPropertyInputParams PROP_INPUT_2 = + new StructuredPropertyInputParams( + PROPERTY_URN_2, ImmutableList.of(new PropertyValueInput("test2", null))); + private static final UpsertStructuredPropertiesInput TEST_INPUT = + new UpsertStructuredPropertiesInput( + TEST_DATASET_URN, ImmutableList.of(PROP_INPUT_1, PROP_INPUT_2)); + + @Test + public void testGetSuccessUpdateExisting() throws Exception { + // mock it so that this entity already has values for the given two properties + StructuredPropertyValueAssignmentArray initialProperties = + new StructuredPropertyValueAssignmentArray(); + 
PrimitivePropertyValueArray propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create("hello")); + initialProperties.add( + new StructuredPropertyValueAssignment() + .setPropertyUrn(UrnUtils.getUrn(PROPERTY_URN_1)) + .setValues(propertyValues)); + initialProperties.add( + new StructuredPropertyValueAssignment() + .setPropertyUrn(UrnUtils.getUrn(PROPERTY_URN_2)) + .setValues(propertyValues)); + EntityClient mockEntityClient = initMockEntityClient(true, initialProperties); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + com.linkedin.datahub.graphql.generated.StructuredProperties result = + resolver.get(mockEnv).get(); + + assertEquals(result.getProperties().size(), 2); + assertEquals(result.getProperties().get(0).getStructuredProperty().getUrn(), PROPERTY_URN_1); + assertEquals(result.getProperties().get(0).getValues().size(), 1); + assertEquals( + result.getProperties().get(0).getValues().get(0).toString(), + new StringValue("test1").toString()); + assertEquals(result.getProperties().get(1).getStructuredProperty().getUrn(), PROPERTY_URN_2); + assertEquals(result.getProperties().get(1).getValues().size(), 1); + assertEquals( + result.getProperties().get(1).getValues().get(0).toString(), + new StringValue("test2").toString()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetSuccessNoExistingProps() throws Exception { + // mock so the original entity has no structured 
properties + EntityClient mockEntityClient = initMockEntityClient(true, null); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + com.linkedin.datahub.graphql.generated.StructuredProperties result = + resolver.get(mockEnv).get(); + + assertEquals(result.getProperties().size(), 2); + assertEquals(result.getProperties().get(0).getStructuredProperty().getUrn(), PROPERTY_URN_2); + assertEquals(result.getProperties().get(0).getValues().size(), 1); + assertEquals( + result.getProperties().get(0).getValues().get(0).toString(), + new StringValue("test2").toString()); + assertEquals(result.getProperties().get(1).getStructuredProperty().getUrn(), PROPERTY_URN_1); + assertEquals(result.getProperties().get(1).getValues().size(), 1); + assertEquals( + result.getProperties().get(1).getValues().get(0).toString(), + new StringValue("test1").toString()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testGetSuccessOneExistingOneNew() throws Exception { + // mock so the original entity has one of the input props and one is new + StructuredPropertyValueAssignmentArray initialProperties = + new StructuredPropertyValueAssignmentArray(); + PrimitivePropertyValueArray propertyValues = new PrimitivePropertyValueArray(); + propertyValues.add(PrimitivePropertyValue.create("hello")); + initialProperties.add( + new StructuredPropertyValueAssignment() + .setPropertyUrn(UrnUtils.getUrn(PROPERTY_URN_1)) + .setValues(propertyValues)); + EntityClient 
mockEntityClient = initMockEntityClient(true, initialProperties); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + com.linkedin.datahub.graphql.generated.StructuredProperties result = + resolver.get(mockEnv).get(); + + assertEquals(result.getProperties().size(), 2); + assertEquals(result.getProperties().get(0).getStructuredProperty().getUrn(), PROPERTY_URN_1); + assertEquals(result.getProperties().get(0).getValues().size(), 1); + assertEquals( + result.getProperties().get(0).getValues().get(0).toString(), + new StringValue("test1").toString()); + assertEquals(result.getProperties().get(1).getStructuredProperty().getUrn(), PROPERTY_URN_2); + assertEquals(result.getProperties().get(1).getValues().size(), 1); + assertEquals( + result.getProperties().get(1).getValues().get(0).toString(), + new StringValue("test2").toString()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + @Test + public void testThrowsError() throws Exception { + EntityClient mockEntityClient = initMockEntityClient(false, null); + UpsertStructuredPropertiesResolver resolver = + new UpsertStructuredPropertiesResolver(mockEntityClient); + + // Execute resolver + QueryContext mockContext = getMockAllowContext(); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, 
() -> resolver.get(mockEnv).join()); + + // Validate that we called ingestProposal the correct number of times + Mockito.verify(mockEntityClient, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(MetadataChangeProposal.class), Mockito.eq(false)); + } + + private EntityClient initMockEntityClient( + final boolean shouldSucceed, @Nullable StructuredPropertyValueAssignmentArray properties) + throws Exception { + Urn assetUrn = UrnUtils.getUrn(TEST_DATASET_URN); + EntityClient client = Mockito.mock(EntityClient.class); + + Mockito.when(client.exists(any(OperationContext.class), Mockito.eq(assetUrn), any())) + .thenReturn(true); + Mockito.when(client.exists(any(OperationContext.class), Mockito.eq(assetUrn))).thenReturn(true); + + if (!shouldSucceed) { + Mockito.doThrow(new RuntimeException()) + .when(client) + .getV2(any(), Mockito.any(), Mockito.any(), Mockito.any()); + } else { + if (properties == null) { + Mockito.when( + client.getV2( + any(), + Mockito.eq(assetUrn.getEntityType()), + Mockito.eq(assetUrn), + Mockito.eq(ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME)))) + .thenReturn(null); + } else { + StructuredProperties structuredProps = new StructuredProperties(); + structuredProps.setProperties(properties); + EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); + aspectMap.put( + STRUCTURED_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(structuredProps.data()))); + EntityResponse response = new EntityResponse(); + response.setAspects(aspectMap); + Mockito.when( + client.getV2( + any(), + Mockito.eq(assetUrn.getEntityType()), + Mockito.eq(assetUrn), + Mockito.eq(ImmutableSet.of(STRUCTURED_PROPERTIES_ASPECT_NAME)))) + .thenReturn(response); + } + } + return client; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 268d6a6bc4268d..ee1d59cdf87c75 
100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -1,7 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; @@ -13,141 +18,156 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddTagsResolver; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class AddTagsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @Test public void testGetSuccessNoExistingTags() throws Exception { - EntityService 
mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + 
.setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(originalTags); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + 
EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalTags); + + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + 
Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test public void testGetFailureTagDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); AddTagsResolver resolver = new AddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -157,25 +177,28 @@ public void testGetFailureTagDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); AddTagsResolver resolver = new AddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -185,15 +208,14 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); AddTagsResolver resolver = new 
AddTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -204,23 +226,22 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index 651b89359c83fa..5f6db4cb1e5a56 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -1,7 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; @@ -15,263 +20,290 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchAddTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private 
static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @Test public void testGetSuccessNoExistingTags() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new 
BatchAddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), 
GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - 
GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true)); } @Test public void testGetFailureTagDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(false); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); 
+ Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); 
BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index f302540eba9048..9f34c0da82744a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -1,7 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; @@ -16,7 +21,7 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; 
-import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; @@ -26,60 +31,68 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchRemoveTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @Test public void testGetSuccessNoExistingTags() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) 
.thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 
= MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); proposal2.setEntityType(Constants.DATASET_ENTITY_NAME); proposal2.setAspectName(Constants.GLOBAL_TAGS_ASPECT_NAME); @@ -91,145 +104,167 @@ public void testGetSuccessNoExistingTags() throws Exception { @Test public void testGetSuccessExistingTags() throws Exception { - EntityService mockService = getMockEntityService(); - - final GlobalTags oldTags1 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + final GlobalTags oldTags1 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags1); - final GlobalTags oldTags2 = new 
GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags2 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + 
BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TAG_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), 
Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), 
Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java index f801daf4f2a3f1..6c8984addb2651 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java @@ -1,39 +1,37 @@ package com.linkedin.datahub.graphql.resolvers.tag; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTagInput; 
import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.tag.TagProperties; import com.linkedin.metadata.key.TagKey; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.tag.TagProperties; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class CreateTagResolverTest { - private static final CreateTagInput TEST_INPUT = new CreateTagInput( - "test-id", - "test-name", - "test-description" - ); + private static final CreateTagInput TEST_INPUT = + new CreateTagInput("test-id", "test-name", "test-description"); @Test public void testGetSuccess() throws Exception { // Create resolver - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal(Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + any(), Mockito.any(MetadataChangeProposal.class), anyBoolean())) .thenReturn(String.format("urn:li:tag:%s", TEST_INPUT.getId())); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -50,21 +48,18 @@ public void testGetSuccess() throws Exception { TagProperties props = new TagProperties(); props.setDescription("test-description"); props.setName("test-name"); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, props); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, 
TAG_PROPERTIES_ASPECT_NAME, props); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); } @Test public void testGetUnauthorized() throws Exception { // Create resolver - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -75,20 +70,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), Mockito.eq(false)); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); // Execute resolver @@ -99,4 +91,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java index b01ac1a9b14ae9..2c2317cf555975 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTagResolverTest { private static final String TEST_URN = "urn:li:tag:test-id"; @@ -30,10 +29,8 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(Urn.createFromString(TEST_URN))); } @Test @@ -49,8 +46,6 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), Mockito.any()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index b5bbf0775a8bae..92f7a2688b43eb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -1,6 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.tag; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -23,11 +29,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetTagColorResolverTest { private static final String TEST_ENTITY_URN = "urn:li:tag:test-tag"; @@ -37,17 +38,20 @@ public class SetTagColorResolverTest { public void testGetSuccessExistingProperties() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + 
any(), + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTagProperties); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -59,35 +63,35 @@ public void testGetSuccessExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final TagProperties newTagProperties = new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - TAG_PROPERTIES_ASPECT_NAME, newTagProperties); + final TagProperties newTagProperties = + new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), TAG_PROPERTIES_ASPECT_NAME, newTagProperties); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + verifyIngestProposal(mockClient, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true)); } @Test public void testGetFailureNoExistingProperties() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); // Test setting the domain - Mockito.when(mockService.getAspect( - 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0))) + Mockito.when( + mockService.getAspect( + any(), + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -99,9 +103,7 @@ public void testGetFailureNoExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test @@ -111,24 +113,30 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - final EnvelopedAspect oldTagPropertiesAspect = new EnvelopedAspect() - .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) - .setValue(new Aspect(oldTagProperties.data())); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.TAG_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.TAG_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.TAG_PROPERTIES_ASPECT_NAME, - oldTagPropertiesAspect))))); - - 
EntityService mockService = getMockEntityService(); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); + final EnvelopedAspect oldTagPropertiesAspect = + new EnvelopedAspect() + .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) + .setValue(new Aspect(oldTagProperties.data())); + Mockito.when( + mockClient.batchGetV2( + any(), + Mockito.eq(Constants.TAG_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.TAG_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.TAG_PROPERTIES_ASPECT_NAME, oldTagPropertiesAspect))))); + + EntityService mockService = getMockEntityService(); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); @@ -139,16 +147,14 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("colorHex"))).thenReturn(TEST_COLOR_HEX); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); SetTagColorResolver resolver = new SetTagColorResolver(mockClient, mockService); // 
Execute resolver @@ -159,18 +165,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetTagColorResolver resolver = new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), anyBoolean()); + SetTagColorResolver resolver = + new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -181,4 +186,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 213d21fd35dc1e..8f8a071ce89329 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static 
org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -13,210 +17,212 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddTermsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @Test public void testGetSuccessNoExistingTerms() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) + .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); + + Mockito.verify(mockService, 
Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))) - ); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); + + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, 
null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true)); } @Test public void testGetFailureTermDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + 
.thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(false); AddTermsResolver resolver = new AddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), 
eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); AddTermsResolver resolver = new AddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); AddTermsResolver resolver = new AddTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, 
Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 8887bb452b478c..ced9e371814f7e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -14,145 +18,169 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchAddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_GLOSSARY_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_GLOSSARY_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @Test public void testGetSuccessNoExistingTerms() throws Exception { - EntityService mockService = getMockEntityService(); - - 
Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when( + mockService.exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, 
null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN)))) - ); - - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN))))); + + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + 
eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when( + mockService.exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of( - TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + 
ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)), eq(true)); } @Test public void testGetFailureTagDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) + .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -161,34 +189,43 @@ public void testGetFailureTagDoesNotExist() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), 
eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when( + mockService.exists(any(), eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)), eq(true))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -197,18 +234,19 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new 
ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -218,23 +256,24 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No 
newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index 995a4acb8a4676..254a301159ac25 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,7 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.GlossaryTermAssociationArray; import com.linkedin.common.GlossaryTerms; @@ -14,56 +18,62 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + 
"urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @Test public void testGetSuccessNoExistingTerms() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + 
.thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -72,47 +82,64 @@ public void testGetSuccessNoExistingTerms() throws Exception { @Test public void testGetSuccessExistingTerms() throws Exception { - EntityService mockService = getMockEntityService(); - - final GlossaryTerms oldTerms1 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + final GlossaryTerms oldTerms1 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), + new GlossaryTermAssociation() + 
.setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN))))); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms1); - final GlossaryTerms oldTerms2 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms2 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(oldTerms2); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -121,35 +148,42 @@ public void testGetSuccessExistingTerms() throws Exception { @Test public void testGetFailureResourceDoesNotExist() throws Exception { - EntityService mockService = getMockEntityService(); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + EntityService mockService = getMockEntityService(); + + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + any(), + eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_1)), eq(true))) + .thenReturn(false); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_ENTITY_URN_2)), eq(true))) + .thenReturn(true); + Mockito.when(mockService.exists(any(), eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) + .thenReturn(true); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); @@ -158,19 +192,19 @@ public void testGetFailureResourceDoesNotExist() throws Exception { @Test public void testGetUnauthorized() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), 
ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -180,26 +214,26 @@ public void testGetUnauthorized() throws Exception { @Test public void testGetEntityClientException() throws Exception { - EntityService mockService = getMockEntityService(); + EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal(any(), Mockito.any(AspectsBatchImpl.class), Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); 
+ Mockito.when(mockEnv.getArgument(eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java index 911152d8c97c12..b23fc855dc835f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.test; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestInput; import com.linkedin.datahub.graphql.generated.TestDefinitionInput; @@ -19,19 +22,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestResolverTest { - private static final CreateTestInput TEST_INPUT = new CreateTestInput( - "test-id", - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final CreateTestInput TEST_INPUT = + new CreateTestInput( + "test-id", + "test-name", + "test-category", + "test-description", + new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -50,16 +49,20 @@ public void testGetSuccess() throws Exception { final TestKey key = new TestKey(); key.setId("test-id"); - ArgumentCaptor proposalCaptor = 
ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal(any(), proposalCaptor.capture(), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityKeyAspect(), GenericRecordUtils.serializeAspect(key)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -80,19 +83,16 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + 
Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any(), Mockito.eq(false)); CreateTestResolver resolver = new CreateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java index 6a449e3c4c4c4b..b4900fa86c3257 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.test; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; @@ -30,10 +29,8 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(any(), Mockito.eq(Urn.createFromString(TEST_URN))); } @Test @@ -49,8 +46,6 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - 
Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(any(), Mockito.any()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java index 5026e015039e1e..5e3cd539cade76 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; @@ -18,37 +20,33 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListTestsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("test", "test-id"); - private static final ListTestsInput TEST_INPUT = new ListTestsInput( - 0, 20, null - ); + private static final ListTestsInput TEST_INPUT = new ListTestsInput(0, 20, null); @Test public void testGetSuccess() throws 
Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.TEST_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.TEST_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListTestsResolver resolver = new ListTestsResolver(mockClient); @@ -75,33 +73,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search(any(), any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = 
Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search(any(), any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), Mockito.anyInt()); ListTestsResolver resolver = new ListTestsResolver(mockClient); // Execute resolver @@ -112,4 +98,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java index ae24232bce17cd..af009786f4d50d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.test; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.datahub.graphql.generated.TestDefinitionInput; +import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; @@ -19,19 +22,12 @@ import org.mockito.Mockito; import 
org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; - private static final UpdateTestInput TEST_INPUT = new UpdateTestInput( - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final UpdateTestInput TEST_INPUT = + new UpdateTestInput( + "test-name", "test-category", "test-description", new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -48,16 +44,20 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - ArgumentCaptor proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal(any(), proposalCaptor.capture(), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityUrn(), UrnUtils.getUrn(TEST_URN)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -79,18 +79,16 @@ public void 
testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any()); } @Test public void testGetEntityClientException() throws Exception { // Update resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(any(), Mockito.any()); UpdateTestResolver resolver = new UpdateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java index 2164d4160634ce..742e162963ea38 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static 
org.testng.Assert.*; - - public class CreateNativeUserResetTokenResolverTest { private static final String RESET_TOKEN = "resetToken"; @@ -47,7 +46,8 @@ public void testFailsNullUserUrn() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(null); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); } @@ -59,7 +59,8 @@ public void testPasses() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(USER_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertEquals(RESET_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getResetToken()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java index 0957acf0cbbb30..502188d4977a53 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.view; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import 
static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,6 +19,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -23,34 +29,35 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateViewResolverTest { - private static final CreateViewInput TEST_INPUT = new CreateViewInput( - DataHubViewType.PERSONAL, - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final CreateViewInput TEST_INPUT = + new CreateViewInput( + DataHubViewType.PERSONAL, + "test-name", + "test-description", + new 
DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -71,37 +78,59 @@ public void testGetSuccess() throws Exception { assertEquals(view.getDescription(), TEST_INPUT.getDescription()); assertEquals(view.getViewType(), TEST_INPUT.getViewType()); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - assertEquals(view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); - assertEquals(view.getDefinition().getFilter().getOperator(), TEST_INPUT.getDefinition().getFilter().getOperator()); - assertEquals(view.getDefinition().getFilter().getFilters().size(), TEST_INPUT.getDefinition().getFilter().getFilters().size()); - - Mockito.verify(mockService, Mockito.times(1)).createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + assertEquals( + view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); + assertEquals( + view.getDefinition().getFilter().getOperator(), + TEST_INPUT.getDefinition().getFilter().getOperator()); + assertEquals( + view.getDefinition().getFilter().getFilters().size(), + TEST_INPUT.getDefinition().getFilter().getFilters().size()); + + Mockito.verify(mockService, Mockito.times(1)) + .createView( + any(), + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.anyLong()); } @Test @@ -118,22 +147,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), Mockito.any(), anyBoolean()); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createView( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createView( + any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.anyLong()); CreateViewResolver resolver = new CreateViewResolver(mockService); @@ -148,14 +172,15 @@ public void testGetViewServiceException() throws Exception { private ViewService initMockService() { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_VIEW_URN); + Mockito.when( + service.createView( + any(), + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(), + Mockito.anyLong())) + .thenReturn(TEST_VIEW_URN); return service; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java index afb4c16767f47c..0644bc2117a846 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -17,10 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -40,10 +39,7 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).deleteView(any(), Mockito.eq(TEST_URN)); } @Test @@ -60,10 +56,7 @@ public void testGetSuccessGlobalViewCanManager() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).deleteView(any(), Mockito.eq(TEST_URN)); } @Test @@ -79,13 +72,9 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() throws Exception { 
assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)).deleteView(any(), Mockito.eq(TEST_URN)); } - @Test public void testGetSuccessPersonalViewIsCreator() throws Exception { ViewService mockService = initViewService(DataHubViewType.PERSONAL); @@ -99,10 +88,7 @@ public void testGetSuccessPersonalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).deleteView(any(), Mockito.eq(TEST_URN)); } @Test @@ -118,19 +104,14 @@ public void testGetFailurePersonalViewIsNotCreator() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)).deleteView(any(), Mockito.eq(TEST_URN)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteView( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class).when(mockService).deleteView(any(), Mockito.any()); DeleteViewResolver resolver = new DeleteViewResolver(mockService); @@ -146,19 +127,20 @@ public void testGetViewServiceException() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new 
AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) - .thenReturn(testInfo); + Mockito.when(mockService.getViewInfo(any(), Mockito.eq(TEST_URN))).thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 9a25c9eb1d25c6..a3b9e25e992259 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -12,7 +15,6 @@ import com.linkedin.datahub.graphql.generated.ListViewsResult; import com.linkedin.entity.client.EntityClient; import 
com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -28,53 +30,52 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListGlobalViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput( - 0, 20, "" - ); + private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput(0, 20, ""); @Test public void testGetSuccessInput() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new 
SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListGlobalViewsResolver resolver = new ListGlobalViewsResolver(mockClient); @@ -107,28 +108,29 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); // Execute resolver @@ -139,4 +141,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 4c435841448251..99b0e76976748e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.view; -import com.datahub.authentication.Authentication; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -11,7 +14,6 @@ import com.linkedin.datahub.graphql.generated.ListMyViewsInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -27,63 +29,64 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListMyViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListMyViewsInput TEST_INPUT_1 = new ListMyViewsInput( - 0, 20, "", DataHubViewType.GLOBAL - ); + private static final ListMyViewsInput TEST_INPUT_1 = + new ListMyViewsInput(0, 20, "", DataHubViewType.GLOBAL); - private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput( - 0, 20, "", null 
- ); + private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput(0, 20, "", null); @Test public void testGetSuccessInput1() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false), + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false), new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20))) + 
.thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -106,35 +109,40 @@ public void testGetSuccessInput2() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ 
-165,28 +173,29 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + any(), Mockito.any(), Mockito.eq(""), Mockito.anyMap(), Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.anyInt()); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); // Execute resolver @@ -197,4 +206,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java index b4895982ae7801..fe32bec01fd7ed 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.view; -import com.datahub.authentication.Authentication; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.linkedin.common.AuditStamp; @@ -38,30 +42,33 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateViewInput TEST_INPUT = new UpdateViewInput( - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final UpdateViewInput TEST_INPUT = + new UpdateViewInput( + "test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); @Test public void testGetSuccessGlobalViewIsCreator() throws Exception { @@ -81,33 +88,50 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { 
assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + any(), + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.anyLong()); } @Test @@ -128,46 +152,59 @@ public void testGetSuccessGlobalViewManageGlobalViews() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + any(), + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.anyLong()); } @Test public void testGetViewServiceException() throws Exception { // Update resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateView( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateView(any(), any(Urn.class), any(), any(), any(), Mockito.anyLong()); UpdateViewResolver resolver = new UpdateViewResolver(mockService); @@ -196,43 +233,42 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)).ingestProposal(any(), any(), anyBoolean()); } private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - 
.setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) - .thenReturn(testInfo); - - Mockito.when(mockService.getViewEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when(mockService.getViewInfo(any(), Mockito.eq(TEST_URN))).thenReturn(testInfo); + + Mockito.when(mockService.getViewEntityResponse(any(), Mockito.eq(TEST_URN))) .thenReturn(testEntityResponse); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java index 9578ff201ca194..701ddd84c173e7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.view; -import com.datahub.authentication.Authentication; +import static 
com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -14,6 +18,7 @@ import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -25,12 +30,8 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.Assert; -import org.testng.annotations.Test; import org.mockito.Mockito; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - +import org.testng.annotations.Test; public class ViewUtilsTest { @@ -39,10 +40,10 @@ public class ViewUtilsTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); - @Test public static void testCanCreatePersonalViewAllowed() { - boolean res = ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); + boolean res = + ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); Assert.assertTrue(res); } @@ -67,10 +68,7 @@ public void testCanUpdateViewSuccessGlobalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(TEST_VIEW_URN)); } @Test @@ -80,10 +78,7 @@ public void testCanUpdateViewSuccessGlobalViewCanManageGlobalViews() { 
assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(TEST_VIEW_URN)); } @Test @@ -93,10 +88,7 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(TEST_VIEW_URN)); } @Test @@ -106,10 +98,7 @@ public void testGetSuccessPersonalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(TEST_VIEW_URN)); } @Test @@ -119,69 +108,88 @@ public void testGetFailurePersonalViewIsNotCreator() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)).getViewInfo(any(), Mockito.eq(TEST_VIEW_URN)); } @Test public void testMapDefinition() throws Exception { - DataHubViewDefinitionInput input = new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.IN), - new FacetFilterInput("test2", null, ImmutableList.of("value3", "value4"), true, FilterOperator.CONTAIN) - ) - ) - ); - - DataHubViewDefinition expectedResult = 
new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion() - .setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setValue("value1") // Disgraceful - .setField("test1.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.IN), - new Criterion() - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("value3", "value4"))) - .setValue("value3") // Disgraceful - .setField("test2.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.CONTAIN) - )) - ) - ) - )) - ); - - assertEquals(ViewUtils.mapDefinition(input), expectedResult); + DataHubViewDefinitionInput input = + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.IN), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value3", "value4"), + true, + FilterOperator.CONTAIN)))); + + DataHubViewDefinition expectedResult = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setNegated(false) + .setValues( + new StringArray( + ImmutableList.of("value1", "value2"))) + .setValue("value1") // Disgraceful + .setField( + "test1.keyword") // Consider whether we + // should NOT go through + // the 
keyword mapping. + .setCondition(Condition.IN), + new Criterion() + .setNegated(true) + .setValues( + new StringArray( + ImmutableList.of("value3", "value4"))) + .setValue("value3") // Disgraceful + .setField( + "test2.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. + .setCondition(Condition.CONTAIN)))))))); + + assertEquals(ViewUtils.mapDefinition(input, mock(AspectRetriever.class)), expectedResult); } private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class))) - .thenReturn(testInfo); + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when(mockService.getViewInfo(any(), Mockito.eq(TEST_VIEW_URN))).thenReturn(testInfo); return mockService; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapperTest.java new file mode 100644 index 00000000000000..82f4fe687bf769 --- /dev/null +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapperTest.java @@ -0,0 +1,415 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import static org.testng.Assert.assertEquals; + +import com.google.common.collect.ImmutableList; +import com.linkedin.assertion.AssertionInfo; +import com.linkedin.assertion.AssertionSource; +import com.linkedin.assertion.AssertionSourceType; +import com.linkedin.assertion.AssertionStdAggregation; +import com.linkedin.assertion.AssertionStdOperator; +import com.linkedin.assertion.AssertionStdParameter; +import com.linkedin.assertion.AssertionStdParameterType; +import com.linkedin.assertion.AssertionStdParameters; +import com.linkedin.assertion.AssertionType; +import com.linkedin.assertion.CustomAssertionInfo; +import com.linkedin.assertion.DatasetAssertionInfo; +import com.linkedin.assertion.DatasetAssertionScope; +import com.linkedin.assertion.FreshnessAssertionInfo; +import com.linkedin.assertion.FreshnessAssertionSchedule; +import com.linkedin.assertion.FreshnessAssertionScheduleType; +import com.linkedin.assertion.FreshnessAssertionType; +import com.linkedin.assertion.FreshnessCronSchedule; +import com.linkedin.assertion.SchemaAssertionCompatibility; +import com.linkedin.assertion.SchemaAssertionInfo; +import com.linkedin.common.GlobalTags; +import com.linkedin.common.TagAssociationArray; +import com.linkedin.common.UrnArray; +import com.linkedin.common.url.Url; +import com.linkedin.common.urn.TagUrn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.DataMap; +import com.linkedin.data.template.StringMap; +import com.linkedin.datahub.graphql.generated.Assertion; +import com.linkedin.datahub.graphql.generated.FixedIntervalSchedule; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import 
com.linkedin.schema.MySqlDDL; +import com.linkedin.schema.SchemaField; +import com.linkedin.schema.SchemaFieldArray; +import com.linkedin.schema.SchemaFieldDataType; +import com.linkedin.schema.SchemaMetadata; +import com.linkedin.schema.StringType; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class AssertionMapperTest { + + @Test + public void testMapDatasetAssertion() { + // Case 1: Without nullable fields + AssertionInfo input = createFreshnessAssertionInfoWithoutNullableFields(); + EntityResponse datasetAssertionEntityResponse = createAssertionInfoEntityResponse(input); + Assertion output = AssertionMapper.map(null, datasetAssertionEntityResponse); + verifyAssertionInfo(input, output); + + // Case 2: With nullable fields + input = createFreshnessAssertionInfoWithNullableFields(); + EntityResponse datasetAssertionEntityResponseWithNullables = + createAssertionInfoEntityResponse(input); + output = AssertionMapper.map(null, datasetAssertionEntityResponseWithNullables); + verifyAssertionInfo(input, output); + } + + @Test + public void testMapTags() throws Exception { + HashMap aspects = new HashMap<>(); + AssertionInfo info = createFreshnessAssertionInfoWithoutNullableFields(); + + EnvelopedAspect envelopedTagsAspect = new EnvelopedAspect(); + GlobalTags tags = new GlobalTags(); + tags.setTags( + new TagAssociationArray( + new TagAssociationArray( + Collections.singletonList( + new com.linkedin.common.TagAssociation() + .setTag(TagUrn.createFromString("urn:li:tag:test")))))); + envelopedTagsAspect.setValue(new Aspect(tags.data())); + + aspects.put(Constants.ASSERTION_INFO_ASPECT_NAME, createEnvelopedAspect(info.data())); + aspects.put(Constants.GLOBAL_TAGS_ASPECT_NAME, createEnvelopedAspect(tags.data())); + EntityResponse response = createEntityResponse(aspects); + + Assertion assertion = AssertionMapper.map(null, response); + 
assertEquals(assertion.getTags().getTags().size(), 1); + assertEquals( + assertion.getTags().getTags().get(0).getTag().getUrn().toString(), "urn:li:tag:test"); + } + + @Test + public void testMapFreshnessAssertion() { + // Case 1: Without nullable fields + AssertionInfo inputInfo = createFreshnessAssertionInfoWithoutNullableFields(); + + EntityResponse freshnessAssertionEntityResponse = createAssertionInfoEntityResponse(inputInfo); + Assertion output = AssertionMapper.map(null, freshnessAssertionEntityResponse); + verifyAssertionInfo(inputInfo, output); + + // Case 2: With nullable fields + inputInfo = createDatasetAssertionInfoWithNullableFields(); + EntityResponse freshnessAssertionEntityResponseWithNullables = + createAssertionInfoEntityResponse(inputInfo); + output = AssertionMapper.map(null, freshnessAssertionEntityResponseWithNullables); + verifyAssertionInfo(inputInfo, output); + } + + @Test + public void testMapDataSchemaAssertion() { + AssertionInfo input = createSchemaAssertion(); + EntityResponse schemaAssertionEntityResponse = createAssertionInfoEntityResponse(input); + Assertion output = AssertionMapper.map(null, schemaAssertionEntityResponse); + verifyAssertionInfo(input, output); + } + + @Test + public void testMapCustomAssertion() { + // Case 1: Without nullable fields + AssertionInfo input = createCustomAssertionInfoWithoutNullableFields(); + EntityResponse customAssertionEntityResponse = createAssertionInfoEntityResponse(input); + Assertion output = AssertionMapper.map(null, customAssertionEntityResponse); + verifyAssertionInfo(input, output); + + // Case 2: With nullable fields + input = createCustomAssertionInfoWithNullableFields(); + EntityResponse customAssertionEntityResponseWithNullables = + createAssertionInfoEntityResponse(input); + output = AssertionMapper.map(null, customAssertionEntityResponseWithNullables); + verifyAssertionInfo(input, output); + } + + private void verifyAssertionInfo(AssertionInfo input, Assertion output) { + 
Assert.assertNotNull(output); + Assert.assertNotNull(output.getInfo()); + Assert.assertEquals( + output.getInfo().getType().toString(), output.getInfo().getType().toString()); + + if (input.hasDatasetAssertion()) { + verifyDatasetAssertion(input.getDatasetAssertion(), output.getInfo().getDatasetAssertion()); + } + + if (input.hasExternalUrl()) { + Assert.assertEquals(input.getExternalUrl().toString(), output.getInfo().getExternalUrl()); + } + + if (input.hasFreshnessAssertion()) { + verifyFreshnessAssertion( + input.getFreshnessAssertion(), output.getInfo().getFreshnessAssertion()); + } + + if (input.hasSchemaAssertion()) { + verifySchemaAssertion(input.getSchemaAssertion(), output.getInfo().getSchemaAssertion()); + } + + if (input.hasSource()) { + verifySource(input.getSource(), output.getInfo().getSource()); + } + + if (input.hasCustomAssertion()) { + verifyCustomAssertion(input.getCustomAssertion(), output.getInfo().getCustomAssertion()); + } + } + + private void verifyDatasetAssertion( + DatasetAssertionInfo input, + com.linkedin.datahub.graphql.generated.DatasetAssertionInfo output) { + Assert.assertEquals(output.getOperator().toString(), input.getOperator().toString()); + Assert.assertEquals(output.getOperator().toString(), input.getOperator().toString()); + Assert.assertEquals(output.getScope().toString(), input.getScope().toString()); + Assert.assertEquals(output.getDatasetUrn(), input.getDataset().toString()); + if (input.hasAggregation()) { + Assert.assertEquals(output.getAggregation().toString(), input.getAggregation().toString()); + } + if (input.hasNativeType()) { + Assert.assertEquals(output.getNativeType(), input.getNativeType().toString()); + } + if (input.hasLogic()) { + Assert.assertEquals(output.getLogic(), input.getLogic()); + } + if (input.hasFields()) { + Assert.assertTrue( + input.getFields().stream() + .allMatch( + field -> + output.getFields().stream() + .anyMatch(outField -> field.toString().equals(outField.getUrn())))); + } + } + + 
private void verifyFreshnessAssertion( + FreshnessAssertionInfo input, + com.linkedin.datahub.graphql.generated.FreshnessAssertionInfo output) { + Assert.assertEquals(output.getType().toString(), input.getType().toString()); + Assert.assertEquals(output.getEntityUrn(), input.getEntity().toString()); + if (input.hasSchedule()) { + verifyFreshnessSchedule(input.getSchedule(), output.getSchedule()); + } + } + + private void verifySchemaAssertion( + SchemaAssertionInfo input, + com.linkedin.datahub.graphql.generated.SchemaAssertionInfo output) { + Assert.assertEquals(output.getEntityUrn(), input.getEntity().toString()); + Assert.assertEquals(output.getCompatibility().toString(), input.getCompatibility().toString()); + Assert.assertEquals( + output.getSchema().getFields().size(), input.getSchema().getFields().size()); + } + + private void verifyCustomAssertion( + CustomAssertionInfo input, + com.linkedin.datahub.graphql.generated.CustomAssertionInfo output) { + Assert.assertEquals(output.getEntityUrn(), input.getEntity().toString()); + Assert.assertEquals(output.getType(), input.getType()); + if (input.hasLogic()) { + Assert.assertEquals(output.getLogic(), input.getLogic()); + } + if (input.hasField()) { + Assert.assertEquals(output.getField().getPath(), input.getField().getEntityKey().get(1)); + } + } + + private void verifyCronSchedule( + FreshnessCronSchedule input, + com.linkedin.datahub.graphql.generated.FreshnessCronSchedule output) { + Assert.assertEquals(output.getCron(), input.getCron()); + Assert.assertEquals(output.getTimezone(), input.getTimezone()); + if (input.hasWindowStartOffsetMs()) { + Assert.assertEquals(output.getWindowStartOffsetMs(), input.getWindowStartOffsetMs()); + } + } + + private void verifyFreshnessSchedule( + FreshnessAssertionSchedule input, + com.linkedin.datahub.graphql.generated.FreshnessAssertionSchedule output) { + Assert.assertEquals(output.getType().toString(), input.getType().toString()); + if (input.hasCron()) { + 
verifyCronSchedule(input.getCron(), output.getCron()); + } + if (input.hasFixedInterval()) { + verifyFixedIntervalSchedule(input.getFixedInterval(), output.getFixedInterval()); + } + } + + private void verifyFixedIntervalSchedule( + com.linkedin.assertion.FixedIntervalSchedule input, FixedIntervalSchedule output) { + Assert.assertEquals(output.getMultiple(), (int) input.getMultiple()); + Assert.assertEquals(output.getUnit().toString(), input.getUnit().toString()); + } + + private void verifySource( + AssertionSource input, com.linkedin.datahub.graphql.generated.AssertionSource output) { + Assert.assertEquals(output.getType().toString(), input.getType().toString()); + } + + private EntityResponse createAssertionInfoEntityResponse(final AssertionInfo info) { + HashMap aspects = new HashMap<>(); + aspects.put(Constants.ASSERTION_INFO_ASPECT_NAME, createEnvelopedAspect(info.data())); + + return createEntityResponse(aspects); + } + + private EntityResponse createEntityResponse(Map aspects) { + EntityResponse entityResponse = new EntityResponse(); + entityResponse.setUrn(UrnUtils.getUrn("urn:li:assertion:1")); + entityResponse.setAspects(new EnvelopedAspectMap(new HashMap<>())); + aspects.forEach( + (aspectName, envelopedAspect) -> { + entityResponse.getAspects().put(aspectName, envelopedAspect); + }); + + return entityResponse; + } + + private EnvelopedAspect createEnvelopedAspect(DataMap dataMap) { + EnvelopedAspect envelopedAspect = new EnvelopedAspect(); + envelopedAspect.setValue(new Aspect(dataMap)); + return envelopedAspect; + } + + private AssertionInfo createDatasetAssertionInfoWithoutNullableFields() { + AssertionInfo info = new AssertionInfo(); + info.setType(com.linkedin.assertion.AssertionType.DATASET); + DatasetAssertionInfo datasetAssertionInfo = new DatasetAssertionInfo(); + datasetAssertionInfo.setDataset(UrnUtils.getUrn("urn:li:dataset:1")); + datasetAssertionInfo.setScope(DatasetAssertionScope.DATASET_COLUMN); + 
datasetAssertionInfo.setOperator(AssertionStdOperator.GREATER_THAN); + info.setDatasetAssertion(datasetAssertionInfo); + return info; + } + + private AssertionInfo createDatasetAssertionInfoWithNullableFields() { + AssertionInfo infoWithoutNullables = createDatasetAssertionInfoWithoutNullableFields(); + DatasetAssertionInfo baseInfo = infoWithoutNullables.getDatasetAssertion(); + baseInfo.setFields( + new UrnArray( + Arrays.asList( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD),field)")))); + baseInfo.setAggregation(AssertionStdAggregation.SUM); + baseInfo.setParameters(createAssertionStdParameters()); + baseInfo.setNativeType("native_type"); + baseInfo.setNativeParameters(new StringMap(Collections.singletonMap("key", "value"))); + baseInfo.setLogic("sample_logic"); + infoWithoutNullables.setSource( + new AssertionSource().setType(com.linkedin.assertion.AssertionSourceType.INFERRED)); + return infoWithoutNullables; + } + + private AssertionInfo createFreshnessAssertionInfoWithoutNullableFields() { + AssertionInfo info = new AssertionInfo(); + info.setType(AssertionType.FRESHNESS); + FreshnessAssertionInfo freshnessAssertionInfo = new FreshnessAssertionInfo(); + freshnessAssertionInfo.setEntity( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)")); + freshnessAssertionInfo.setType(FreshnessAssertionType.DATASET_CHANGE); + info.setFreshnessAssertion(freshnessAssertionInfo); + return info; + } + + private AssertionInfo createFreshnessAssertionInfoWithNullableFields() { + AssertionInfo infoWithoutNullables = createFreshnessAssertionInfoWithoutNullableFields(); + FreshnessAssertionInfo baseInfo = infoWithoutNullables.getFreshnessAssertion(); + baseInfo.setSchedule(createFreshnessAssertionSchedule()); + infoWithoutNullables.setSource( + new AssertionSource().setType(com.linkedin.assertion.AssertionSourceType.INFERRED)); + return infoWithoutNullables; + } + + private AssertionInfo createSchemaAssertion() 
{ + AssertionInfo info = new AssertionInfo(); + info.setType(AssertionType.DATA_SCHEMA); + SchemaAssertionInfo schemaAssertionInfo = new SchemaAssertionInfo(); + schemaAssertionInfo.setEntity(UrnUtils.getUrn("urn:li:dataset:1")); + schemaAssertionInfo.setCompatibility(SchemaAssertionCompatibility.SUPERSET); + schemaAssertionInfo.setSchema( + new SchemaMetadata() + .setCluster("Test") + .setHash("Test") + .setPlatformSchema(SchemaMetadata.PlatformSchema.create(new MySqlDDL())) + .setFields( + new SchemaFieldArray( + ImmutableList.of( + new SchemaField() + .setType( + new SchemaFieldDataType() + .setType(SchemaFieldDataType.Type.create(new StringType()))) + .setNullable(false) + .setNativeDataType("string") + .setFieldPath("test"))))); + return info; + } + + private AssertionInfo createCustomAssertionInfoWithoutNullableFields() { + AssertionInfo info = new AssertionInfo(); + info.setType(AssertionType.CUSTOM); + CustomAssertionInfo customAssertionInfo = new CustomAssertionInfo(); + customAssertionInfo.setType("Custom Type 1"); + customAssertionInfo.setEntity(UrnUtils.getUrn("urn:li:dataset:1")); + info.setCustomAssertion(customAssertionInfo); + return info; + } + + private AssertionInfo createCustomAssertionInfoWithNullableFields() { + AssertionInfo info = new AssertionInfo(); + info.setType(AssertionType.CUSTOM); + info.setExternalUrl(new Url("https://xyz.com")); + info.setDescription("Description of custom assertion"); + CustomAssertionInfo customAssertionInfo = new CustomAssertionInfo(); + customAssertionInfo.setType("Custom Type 1"); + customAssertionInfo.setEntity( + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD)")); + customAssertionInfo.setField( + UrnUtils.getUrn( + "urn:li:schemaField:(urn:li:dataset:(urn:li:dataPlatform:hive,name,PROD),field)")); + customAssertionInfo.setLogic("custom logic"); + info.setCustomAssertion(customAssertionInfo); + info.setSource(new AssertionSource().setType(AssertionSourceType.EXTERNAL)); + + return info; 
+ } + + private AssertionStdParameters createAssertionStdParameters() { + AssertionStdParameters parameters = new AssertionStdParameters(); + parameters.setValue(createAssertionStdParameter()); + parameters.setMinValue(createAssertionStdParameter()); + parameters.setMaxValue(createAssertionStdParameter()); + return parameters; + } + + private AssertionStdParameter createAssertionStdParameter() { + AssertionStdParameter parameter = new AssertionStdParameter(); + parameter.setType(AssertionStdParameterType.NUMBER); + parameter.setValue("100"); + return parameter; + } + + private FreshnessAssertionSchedule createFreshnessAssertionSchedule() { + FreshnessAssertionSchedule schedule = new FreshnessAssertionSchedule(); + schedule.setType(FreshnessAssertionScheduleType.CRON); + schedule.setCron(createCronSchedule()); + return schedule; + } + + private FreshnessCronSchedule createCronSchedule() { + FreshnessCronSchedule cronSchedule = new FreshnessCronSchedule(); + cronSchedule.setCron("0 0 * * *"); + cronSchedule.setTimezone("UTC"); + return cronSchedule; + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java index c4465c7d3cb659..33774690b7c7a9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java @@ -1,9 +1,16 @@ package com.linkedin.datahub.graphql.types.assertion; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.linkedin.assertion.AssertionAction; +import 
com.linkedin.assertion.AssertionActionArray; +import com.linkedin.assertion.AssertionActionType; +import com.linkedin.assertion.AssertionActions; import com.linkedin.assertion.AssertionInfo; import com.linkedin.assertion.AssertionType; import com.linkedin.common.DataPlatformInstance; @@ -23,32 +30,42 @@ import com.linkedin.metadata.key.AssertionKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class AssertionTypeTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:guid-1"; - private static final AssertionKey TEST_ASSERTION_KEY = new AssertionKey() - .setAssertionId("guid-1"); - private static final AssertionInfo TEST_ASSERTION_INFO = new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(null, SetMode.IGNORE_NULL) - .setCustomProperties(new StringMap()); - private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("snowflake")) - .setInstance(null, SetMode.IGNORE_NULL); + private static final AssertionKey TEST_ASSERTION_KEY = + new AssertionKey().setAssertionId("guid-1"); + private static final AssertionInfo TEST_ASSERTION_INFO = + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion(null, SetMode.IGNORE_NULL) + .setCustomProperties(new StringMap()); + private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance() + .setPlatform(new DataPlatformUrn("snowflake")) + .setInstance(null, SetMode.IGNORE_NULL); + // Acryl SaaS Only + private static final AssertionActions TEST_ASSERTION_ACTIONS = + new AssertionActions() + .setOnSuccess( + new AssertionActionArray( + 
ImmutableList.of( + new AssertionAction().setType(AssertionActionType.RAISE_INCIDENT)))) + .setOnFailure( + new AssertionActionArray( + ImmutableList.of( + new AssertionAction().setType(AssertionActionType.RESOLVE_INCIDENT)))); private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:guid-2"; - @Test public void testBatchLoad() throws Exception { @@ -60,41 +77,50 @@ public void testBatchLoad() throws Exception { Map assertion1Aspects = new HashMap<>(); assertion1Aspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data()))); assertion1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))); assertion1Aspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - assertionUrn1, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn1) - .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); - - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data()))); + assertion1Aspects.put( + Constants.ASSERTION_ACTIONS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_ACTIONS.data()))); + Mockito.when( + client.batchGetV2( + any(), + 
Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + assertionUrn1, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn1) + .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); + + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), + Mockito.eq( + com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 2); @@ -104,6 +130,12 @@ public void testBatchLoad() throws Exception { assertEquals(assertion.getInfo().getType().toString(), AssertionType.DATASET.toString()); assertEquals(assertion.getInfo().getDatasetAssertion(), null); assertEquals(assertion.getPlatform().getUrn(), "urn:li:dataPlatform:snowflake"); + assertEquals( 
+ assertion.getActions().getOnSuccess().get(0).getType(), + com.linkedin.datahub.graphql.generated.AssertionActionType.RAISE_INCIDENT); + assertEquals( + assertion.getActions().getOnFailure().get(0).getType(), + com.linkedin.datahub.graphql.generated.AssertionActionType.RESOLVE_INCIDENT); // Assert second element is null. assertNull(result.get(1)); @@ -112,17 +144,17 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/FieldAssertionMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/FieldAssertionMapperTest.java new file mode 100644 index 00000000000000..7758aaa986fed3 --- /dev/null +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/FieldAssertionMapperTest.java @@ -0,0 +1,100 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.AssertionStdOperator; +import com.linkedin.assertion.FieldAssertionInfo; +import com.linkedin.assertion.FieldAssertionType; +import com.linkedin.assertion.FieldMetricAssertion; +import com.linkedin.assertion.FieldMetricType; +import com.linkedin.assertion.FieldTransform; +import com.linkedin.assertion.FieldTransformType; +import com.linkedin.assertion.FieldValuesAssertion; +import com.linkedin.assertion.FieldValuesFailThreshold; +import com.linkedin.assertion.FieldValuesFailThresholdType; +import com.linkedin.common.urn.Urn; +import com.linkedin.dataset.DatasetFilter; +import com.linkedin.dataset.DatasetFilterType; +import com.linkedin.schema.SchemaFieldSpec; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class FieldAssertionMapperTest { + @Test + public void testMapFieldValuesAssertionInfo() throws Exception { + FieldAssertionInfo fieldAssertionInfo = + new FieldAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setFilter( + new DatasetFilter().setType(DatasetFilterType.SQL).setSql("WHERE value > 5;")) + .setType(FieldAssertionType.FIELD_VALUES) + .setFieldValuesAssertion( + new FieldValuesAssertion() + .setExcludeNulls(true) + .setFailThreshold( + new FieldValuesFailThreshold() + .setType(FieldValuesFailThresholdType.PERCENTAGE) + .setValue(5L)) + .setField( + new SchemaFieldSpec() + .setPath("path") + .setType("STRING") + .setNativeType("VARCHAR")) + .setOperator(AssertionStdOperator.IS_TRUE) + .setTransform(new FieldTransform().setType(FieldTransformType.LENGTH))); + + com.linkedin.datahub.graphql.generated.FieldAssertionInfo result = + FieldAssertionMapper.mapFieldAssertionInfo(null, fieldAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), 
"urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), com.linkedin.datahub.graphql.generated.FieldAssertionType.FIELD_VALUES); + Assert.assertEquals( + result.getFilter().getType(), com.linkedin.datahub.graphql.generated.DatasetFilterType.SQL); + Assert.assertEquals(result.getFilter().getSql(), "WHERE value > 5;"); + Assert.assertEquals(result.getFieldValuesAssertion().getField().getPath(), "path"); + Assert.assertEquals(result.getFieldValuesAssertion().getField().getType(), "STRING"); + Assert.assertEquals(result.getFieldValuesAssertion().getField().getNativeType(), "VARCHAR"); + Assert.assertEquals( + result.getFieldValuesAssertion().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.IS_TRUE); + Assert.assertEquals( + result.getFieldValuesAssertion().getTransform().getType(), + com.linkedin.datahub.graphql.generated.FieldTransformType.LENGTH); + Assert.assertEquals(result.getFieldValuesAssertion().getExcludeNulls(), true); + Assert.assertEquals( + result.getFieldValuesAssertion().getFailThreshold().getType(), + com.linkedin.datahub.graphql.generated.FieldValuesFailThresholdType.PERCENTAGE); + Assert.assertEquals( + result.getFieldValuesAssertion().getFailThreshold().getValue(), Long.valueOf(5L)); + } + + @Test + public void testMapFieldMetricAssertionInfo() throws Exception { + FieldAssertionInfo fieldAssertionInfo = + new FieldAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(FieldAssertionType.FIELD_METRIC) + .setFieldMetricAssertion( + new FieldMetricAssertion() + .setField( + new SchemaFieldSpec() + .setPath("path") + .setType("STRING") + .setNativeType("VARCHAR")) + .setOperator(AssertionStdOperator.IS_TRUE) + .setMetric(FieldMetricType.MEDIAN)); + + com.linkedin.datahub.graphql.generated.FieldAssertionInfo result = + FieldAssertionMapper.mapFieldAssertionInfo(null, fieldAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), 
"urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), com.linkedin.datahub.graphql.generated.FieldAssertionType.FIELD_METRIC); + Assert.assertEquals(result.getFieldMetricAssertion().getField().getPath(), "path"); + Assert.assertEquals(result.getFieldMetricAssertion().getField().getType(), "STRING"); + Assert.assertEquals(result.getFieldMetricAssertion().getField().getNativeType(), "VARCHAR"); + Assert.assertEquals( + result.getFieldMetricAssertion().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.IS_TRUE); + Assert.assertEquals( + result.getFieldMetricAssertion().getMetric(), + com.linkedin.datahub.graphql.generated.FieldMetricType.MEDIAN); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/FreshnessAssertionMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/FreshnessAssertionMapperTest.java new file mode 100644 index 00000000000000..b69ed02bdfd626 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/FreshnessAssertionMapperTest.java @@ -0,0 +1,82 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.FixedIntervalSchedule; +import com.linkedin.assertion.FreshnessAssertionInfo; +import com.linkedin.assertion.FreshnessAssertionSchedule; +import com.linkedin.assertion.FreshnessAssertionScheduleType; +import com.linkedin.assertion.FreshnessAssertionType; +import com.linkedin.assertion.FreshnessCronSchedule; +import com.linkedin.common.urn.Urn; +import com.linkedin.dataset.DatasetFilter; +import com.linkedin.dataset.DatasetFilterType; +import com.linkedin.timeseries.CalendarInterval; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class FreshnessAssertionMapperTest { + @Test + public void testMapCronFreshnessAssertionInfo() throws Exception { + FreshnessAssertionInfo freshnessAssertionInfo = + new 
FreshnessAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(FreshnessAssertionType.DATASET_CHANGE) + .setFilter( + new DatasetFilter().setType(DatasetFilterType.SQL).setSql("WHERE value > 5;")) + .setSchedule( + new FreshnessAssertionSchedule() + .setType(FreshnessAssertionScheduleType.CRON) + .setCron( + new FreshnessCronSchedule() + .setCron("0 0 0 * * ? *") + .setTimezone("America/Los_Angeles") + .setWindowStartOffsetMs(10L))); + + com.linkedin.datahub.graphql.generated.FreshnessAssertionInfo result = + FreshnessAssertionMapper.mapFreshnessAssertionInfo(null, freshnessAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), + com.linkedin.datahub.graphql.generated.FreshnessAssertionType.DATASET_CHANGE); + Assert.assertEquals( + result.getFilter().getType(), com.linkedin.datahub.graphql.generated.DatasetFilterType.SQL); + Assert.assertEquals(result.getFilter().getSql(), "WHERE value > 5;"); + Assert.assertEquals( + result.getSchedule().getType(), + com.linkedin.datahub.graphql.generated.FreshnessAssertionScheduleType.CRON); + Assert.assertEquals(result.getSchedule().getCron().getCron(), "0 0 0 * * ? 
*"); + Assert.assertEquals(result.getSchedule().getCron().getTimezone(), "America/Los_Angeles"); + Assert.assertEquals(result.getSchedule().getCron().getWindowStartOffsetMs(), Long.valueOf(10L)); + } + + @Test + public void testMapFixedIntervalFreshnessAssertionInfo() throws Exception { + FreshnessAssertionInfo freshnessAssertionInfo = + new FreshnessAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(FreshnessAssertionType.DATASET_CHANGE) + .setFilter( + new DatasetFilter().setType(DatasetFilterType.SQL).setSql("WHERE value > 5;")) + .setSchedule( + new FreshnessAssertionSchedule() + .setType(FreshnessAssertionScheduleType.FIXED_INTERVAL) + .setFixedInterval( + new FixedIntervalSchedule().setUnit(CalendarInterval.DAY).setMultiple(10))); + + com.linkedin.datahub.graphql.generated.FreshnessAssertionInfo result = + FreshnessAssertionMapper.mapFreshnessAssertionInfo(null, freshnessAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), + com.linkedin.datahub.graphql.generated.FreshnessAssertionType.DATASET_CHANGE); + Assert.assertEquals( + result.getFilter().getType(), com.linkedin.datahub.graphql.generated.DatasetFilterType.SQL); + Assert.assertEquals(result.getFilter().getSql(), "WHERE value > 5;"); + Assert.assertEquals( + result.getSchedule().getType(), + com.linkedin.datahub.graphql.generated.FreshnessAssertionScheduleType.FIXED_INTERVAL); + Assert.assertEquals( + result.getSchedule().getFixedInterval().getUnit(), + com.linkedin.datahub.graphql.generated.DateInterval.DAY); + Assert.assertEquals(result.getSchedule().getFixedInterval().getMultiple(), 10); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/SqlAssertionMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/SqlAssertionMapperTest.java new file mode 100644 index 
00000000000000..271362c9fd8468 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/SqlAssertionMapperTest.java @@ -0,0 +1,78 @@ +package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.AssertionStdOperator; +import com.linkedin.assertion.AssertionStdParameter; +import com.linkedin.assertion.AssertionStdParameterType; +import com.linkedin.assertion.AssertionStdParameters; +import com.linkedin.assertion.AssertionValueChangeType; +import com.linkedin.assertion.SqlAssertionInfo; +import com.linkedin.assertion.SqlAssertionType; +import com.linkedin.common.urn.Urn; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class SqlAssertionMapperTest { + @Test + public void testMapMetricSqlAssertionInfo() throws Exception { + SqlAssertionInfo sqlAssertionInfo = + new SqlAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(SqlAssertionType.METRIC) + .setStatement("SELECT COUNT(*) FROM foo.bar.baz") + .setOperator(AssertionStdOperator.GREATER_THAN) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setType(AssertionStdParameterType.NUMBER) + .setValue(("5")))); + + com.linkedin.datahub.graphql.generated.SqlAssertionInfo result = + SqlAssertionMapper.mapSqlAssertionInfo(sqlAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), com.linkedin.datahub.graphql.generated.SqlAssertionType.METRIC); + Assert.assertEquals(result.getStatement(), "SELECT COUNT(*) FROM foo.bar.baz"); + Assert.assertEquals( + result.getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.GREATER_THAN); + Assert.assertEquals( + result.getParameters().getValue().getType(), + com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); + 
Assert.assertEquals(result.getParameters().getValue().getValue(), "5"); + } + + @Test + public void testMapMetricChangeSqlAssertionInfo() throws Exception { + SqlAssertionInfo sqlAssertionInfo = + new SqlAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(SqlAssertionType.METRIC_CHANGE) + .setStatement("SELECT COUNT(*) FROM foo.bar.baz") + .setChangeType(AssertionValueChangeType.ABSOLUTE) + .setOperator(AssertionStdOperator.GREATER_THAN) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setType(AssertionStdParameterType.NUMBER) + .setValue(("5")))); + + com.linkedin.datahub.graphql.generated.SqlAssertionInfo result = + SqlAssertionMapper.mapSqlAssertionInfo(sqlAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), com.linkedin.datahub.graphql.generated.SqlAssertionType.METRIC_CHANGE); + Assert.assertEquals(result.getStatement(), "SELECT COUNT(*) FROM foo.bar.baz"); + Assert.assertEquals( + result.getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.GREATER_THAN); + Assert.assertEquals( + result.getParameters().getValue().getType(), + com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); + Assert.assertEquals(result.getParameters().getValue().getValue(), "5"); + Assert.assertEquals( + result.getChangeType(), + com.linkedin.datahub.graphql.generated.AssertionValueChangeType.ABSOLUTE); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/VolumeAssertionMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/VolumeAssertionMapperTest.java new file mode 100644 index 00000000000000..f23fadb6992078 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/VolumeAssertionMapperTest.java @@ -0,0 +1,207 @@ 
+package com.linkedin.datahub.graphql.types.assertion; + +import com.linkedin.assertion.AssertionStdOperator; +import com.linkedin.assertion.AssertionStdParameter; +import com.linkedin.assertion.AssertionStdParameterType; +import com.linkedin.assertion.AssertionStdParameters; +import com.linkedin.assertion.AssertionValueChangeType; +import com.linkedin.assertion.IncrementingSegmentFieldTransformer; +import com.linkedin.assertion.IncrementingSegmentFieldTransformerType; +import com.linkedin.assertion.IncrementingSegmentRowCountChange; +import com.linkedin.assertion.IncrementingSegmentRowCountTotal; +import com.linkedin.assertion.RowCountChange; +import com.linkedin.assertion.RowCountTotal; +import com.linkedin.assertion.VolumeAssertionInfo; +import com.linkedin.assertion.VolumeAssertionType; +import com.linkedin.common.urn.Urn; +import com.linkedin.dataset.DatasetFilter; +import com.linkedin.dataset.DatasetFilterType; +import com.linkedin.schema.SchemaFieldSpec; +import org.testng.Assert; +import org.testng.annotations.Test; + +public class VolumeAssertionMapperTest { + @Test + public void testMapRowCountTotalVolumeAssertionInfo() throws Exception { + VolumeAssertionInfo volumeAssertionInfo = + new VolumeAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(VolumeAssertionType.ROW_COUNT_TOTAL) + .setFilter( + new DatasetFilter().setType(DatasetFilterType.SQL).setSql("WHERE value > 5;")) + .setRowCountTotal( + new RowCountTotal() + .setOperator(AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setType(AssertionStdParameterType.NUMBER) + .setValue("10")))); + + com.linkedin.datahub.graphql.generated.VolumeAssertionInfo result = + VolumeAssertionMapper.mapVolumeAssertionInfo(null, volumeAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + 
result.getType(), + com.linkedin.datahub.graphql.generated.VolumeAssertionType.ROW_COUNT_TOTAL); + Assert.assertEquals( + result.getFilter().getType(), com.linkedin.datahub.graphql.generated.DatasetFilterType.SQL); + Assert.assertEquals(result.getFilter().getSql(), "WHERE value > 5;"); + Assert.assertEquals( + result.getRowCountTotal().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO); + Assert.assertEquals( + result.getRowCountTotal().getParameters().getValue().getType(), + com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); + Assert.assertEquals(result.getRowCountTotal().getParameters().getValue().getValue(), "10"); + } + + @Test + public void testMapRowCountChangeVolumeAssertionInfo() throws Exception { + VolumeAssertionInfo volumeAssertionInfo = + new VolumeAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(VolumeAssertionType.ROW_COUNT_CHANGE) + .setFilter( + new DatasetFilter().setType(DatasetFilterType.SQL).setSql("WHERE value > 5;")) + .setRowCountChange( + new RowCountChange() + .setOperator(AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setType(AssertionStdParameterType.NUMBER) + .setValue("10"))) + .setType(AssertionValueChangeType.ABSOLUTE)); + + com.linkedin.datahub.graphql.generated.VolumeAssertionInfo result = + VolumeAssertionMapper.mapVolumeAssertionInfo(null, volumeAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), + com.linkedin.datahub.graphql.generated.VolumeAssertionType.ROW_COUNT_CHANGE); + Assert.assertEquals( + result.getFilter().getType(), com.linkedin.datahub.graphql.generated.DatasetFilterType.SQL); + Assert.assertEquals(result.getFilter().getSql(), "WHERE value > 5;"); + Assert.assertEquals( + 
result.getRowCountChange().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO); + Assert.assertEquals( + result.getRowCountChange().getParameters().getValue().getType(), + com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); + Assert.assertEquals(result.getRowCountChange().getParameters().getValue().getValue(), "10"); + Assert.assertEquals( + result.getRowCountChange().getType(), + com.linkedin.datahub.graphql.generated.AssertionValueChangeType.ABSOLUTE); + } + + @Test + public void testMapIncrementingSegmentRowCountTotalVolumeAssertionInfo() throws Exception { + VolumeAssertionInfo volumeAssertionInfo = + new VolumeAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(VolumeAssertionType.INCREMENTING_SEGMENT_ROW_COUNT_TOTAL) + .setIncrementingSegmentRowCountTotal( + new IncrementingSegmentRowCountTotal() + .setOperator(AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setType(AssertionStdParameterType.NUMBER) + .setValue("10"))) + .setSegment( + new com.linkedin.assertion.IncrementingSegmentSpec() + .setField( + new SchemaFieldSpec() + .setPath("path") + .setNativeType("VARCHAR") + .setType("STRING")) + .setTransformer( + new IncrementingSegmentFieldTransformer() + .setType(IncrementingSegmentFieldTransformerType.CEILING) + .setNativeType("CEILING")))); + + com.linkedin.datahub.graphql.generated.VolumeAssertionInfo result = + VolumeAssertionMapper.mapVolumeAssertionInfo(null, volumeAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), + com.linkedin.datahub.graphql.generated.VolumeAssertionType + .INCREMENTING_SEGMENT_ROW_COUNT_TOTAL); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getOperator(), + 
com.linkedin.datahub.graphql.generated.AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getParameters().getValue().getType(), + com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getParameters().getValue().getValue(), "10"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getSegment().getField().getPath(), "path"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getSegment().getField().getNativeType(), + "VARCHAR"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getSegment().getField().getType(), "STRING"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getSegment().getTransformer().getType(), + com.linkedin.datahub.graphql.generated.IncrementingSegmentFieldTransformerType.CEILING); + Assert.assertEquals( + result.getIncrementingSegmentRowCountTotal().getSegment().getTransformer().getNativeType(), + "CEILING"); + } + + @Test + public void testMapIncrementingSegmentRowCountChangeVolumeAssertionInfo() throws Exception { + VolumeAssertionInfo volumeAssertionInfo = + new VolumeAssertionInfo() + .setEntity(new Urn("urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)")) + .setType(VolumeAssertionType.INCREMENTING_SEGMENT_ROW_COUNT_CHANGE) + .setIncrementingSegmentRowCountChange( + new IncrementingSegmentRowCountChange() + .setType(AssertionValueChangeType.ABSOLUTE) + .setOperator(AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setType(AssertionStdParameterType.NUMBER) + .setValue("10"))) + .setSegment( + new com.linkedin.assertion.IncrementingSegmentSpec() + .setField( + new SchemaFieldSpec() + .setPath("path") + .setNativeType("VARCHAR") + .setType("STRING")))); + + com.linkedin.datahub.graphql.generated.VolumeAssertionInfo result = 
+ VolumeAssertionMapper.mapVolumeAssertionInfo(null, volumeAssertionInfo); + Assert.assertEquals(result.getEntityUrn(), "urn:li:dataset:(urn:li:dataPlatform:foo,bar,baz)"); + Assert.assertEquals( + result.getType(), + com.linkedin.datahub.graphql.generated.VolumeAssertionType + .INCREMENTING_SEGMENT_ROW_COUNT_CHANGE); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getType(), + com.linkedin.datahub.graphql.generated.AssertionValueChangeType.ABSOLUTE); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.GREATER_THAN_OR_EQUAL_TO); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getParameters().getValue().getType(), + com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getParameters().getValue().getValue(), "10"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getSegment().getField().getPath(), "path"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getSegment().getField().getNativeType(), + "VARCHAR"); + Assert.assertEquals( + result.getIncrementingSegmentRowCountChange().getSegment().getField().getType(), "STRING"); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java index 54b341fc1865a5..ef69278df61a7a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java @@ -8,7 +8,8 @@ public class EmbedMapperTest { @Test public void testEmbedMapper() throws Exception { final String renderUrl = "https://www.google.com"; - final Embed result = 
EmbedMapper.map(new com.linkedin.common.Embed().setRenderUrl(renderUrl)); + final Embed result = + EmbedMapper.map(null, new com.linkedin.common.Embed().setRenderUrl(renderUrl)); Assert.assertEquals(result.getRenderUrl(), renderUrl); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java index 3ff4e43ca112c4..6d8f2e1b3ecced 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java @@ -1,6 +1,9 @@ - package com.linkedin.datahub.graphql.types.container; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,12 +29,12 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.container.ContainerProperties; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; @@ -41,58 +44,68 @@ import com.linkedin.metadata.key.ContainerKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import 
java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class ContainerTypeTest { private static final String TEST_CONTAINER_1_URN = "urn:li:container:guid-1"; - private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey() - .setGuid("guid-1"); - private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = new ContainerProperties() - .setDescription("test description") - .setName("Test Container"); - private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = new EditableContainerProperties() - .setDescription("test editable description"); - private static final Ownership TEST_CONTAINER_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); - private static final Status TEST_CONTAINER_1_STATUS = new Status() - .setRemoved(false); - private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = new SubTypes() - .setTypeNames(new StringArray(ImmutableList.of("Database"))); - private static final GlobalTags TEST_CONTAINER_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = new 
GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); - private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = new com.linkedin.container.Container() - .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); + private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey().setGuid("guid-1"); + private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = + new ContainerProperties().setDescription("test description").setName("Test Container"); + private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = + new EditableContainerProperties().setDescription("test editable description"); + private static final Ownership TEST_CONTAINER_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); + private static final Status TEST_CONTAINER_1_STATUS = new Status().setRemoved(false); + private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("Database"))); + private static final GlobalTags TEST_CONTAINER_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new 
TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = + new com.linkedin.container.Container() + .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); private static final String TEST_CONTAINER_2_URN = "urn:li:container:guid-2"; @Test public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); + EntityClient client = mock(EntityClient.class); Urn containerUrn1 = Urn.createFromString(TEST_CONTAINER_1_URN); Urn containerUrn2 = Urn.createFromString(TEST_CONTAINER_2_URN); @@ -100,73 +113,68 @@ public void testBatchLoad() throws Exception { Map container1Aspects = new HashMap<>(); container1Aspects.put( Constants.CONTAINER_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data()))); container1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data()))); container1Aspects.put( Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data()))); container1Aspects.put( Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data()))); container1Aspects.put( 
Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data()))); container1Aspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data()))); container1Aspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data()))); container1Aspects.put( Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data()))); container1Aspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data()))); container1Aspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data()))); container1Aspects.put( Constants.CONTAINER_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - containerUrn1, - new EntityResponse() - .setEntityName(Constants.CONTAINER_ENTITY_NAME) - .setUrn(containerUrn1) - .setAspects(new EnvelopedAspectMap(container1Aspects)))); + new EnvelopedAspect().setValue(new 
Aspect(TEST_CONTAINER_1_CONTAINER.data()))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + containerUrn1, + new EntityResponse() + .setEntityName(Constants.CONTAINER_ENTITY_NAME) + .setUrn(containerUrn1) + .setAspects(new EnvelopedAspectMap(container1Aspects)))); ContainerType type = new ContainerType(client); - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); + QueryContext mockContext = mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 2); @@ -177,8 +185,12 @@ public void testBatchLoad() throws Exception { assertEquals(container1.getProperties().getDescription(), "test description"); assertEquals(container1.getProperties().getName(), "Test Container"); assertEquals(container1.getInstitutionalMemory().getElements().size(), 1); - 
assertEquals(container1.getSubTypes().getTypeNames().get(0), TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0)); - assertEquals(container1.getEditableProperties().getDescription(), TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription()); + assertEquals( + container1.getSubTypes().getTypeNames().get(0), + TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0)); + assertEquals( + container1.getEditableProperties().getDescription(), + TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription()); assertEquals( container1.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_CONTAINER_1_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); @@ -186,8 +198,7 @@ public void testBatchLoad() throws Exception { container1.getTags().getTags().get(0).getTag().getUrn(), TEST_CONTAINER_1_TAGS.getTags().get(0).getTag().toString()); assertEquals( - container1.getContainer().getUrn(), - TEST_CONTAINER_1_CONTAINER.getContainer().toString()); + container1.getContainer().getUrn(), TEST_CONTAINER_1_CONTAINER.getContainer().toString()); // Assert second element is null. 
assertNull(result.get(1)); @@ -195,18 +206,18 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + EntityClient mockClient = mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); ContainerType type = new ContainerType(mockClient); // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), - context)); + QueryContext context = mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datacontract/DataContractMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datacontract/DataContractMapperTest.java new file mode 100644 index 00000000000000..49f5a985ea4a3c --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datacontract/DataContractMapperTest.java @@ -0,0 +1,180 @@ +package com.linkedin.datahub.graphql.types.datacontract; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertNull; + +import com.google.common.collect.ImmutableList; +import 
com.google.common.collect.ImmutableMap; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringMap; +import com.linkedin.datacontract.DataContractProperties; +import com.linkedin.datacontract.DataContractState; +import com.linkedin.datacontract.DataContractStatus; +import com.linkedin.datacontract.DataQualityContract; +import com.linkedin.datacontract.DataQualityContractArray; +import com.linkedin.datacontract.FreshnessContract; +import com.linkedin.datacontract.FreshnessContractArray; +import com.linkedin.datacontract.SchemaContract; +import com.linkedin.datacontract.SchemaContractArray; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.metadata.Constants; +import java.util.Collections; +import org.testng.annotations.Test; + +public class DataContractMapperTest { + + @Test + public void testMapAllFields() throws Exception { + EntityResponse entityResponse = new EntityResponse(); + Urn urn = Urn.createFromString("urn:li:dataContract:1"); + Urn dataQualityAssertionUrn = Urn.createFromString("urn:li:assertion:quality"); + Urn dataQualityAssertionUrn2 = Urn.createFromString("urn:li:assertion:quality2"); + + Urn freshnessAssertionUrn = Urn.createFromString("urn:li:assertion:freshness"); + Urn schemaAssertionUrn = Urn.createFromString("urn:li:assertion:schema"); + Urn datasetUrn = + Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:snowflake,test,PROD)"); + entityResponse.setUrn(urn); + + EnvelopedAspect envelopedDataContractProperties = new EnvelopedAspect(); + DataContractProperties dataContractProperties = new DataContractProperties(); + dataContractProperties.setDataQuality( + new DataQualityContractArray( + ImmutableList.of( + new 
DataQualityContract().setAssertion(dataQualityAssertionUrn), + new DataQualityContract().setAssertion(dataQualityAssertionUrn2)))); + dataContractProperties.setFreshness( + new FreshnessContractArray( + ImmutableList.of(new FreshnessContract().setAssertion(freshnessAssertionUrn)))); + dataContractProperties.setSchema( + new SchemaContractArray( + ImmutableList.of(new SchemaContract().setAssertion(schemaAssertionUrn)))); + + dataContractProperties.setEntity(datasetUrn); + + envelopedDataContractProperties.setValue(new Aspect(dataContractProperties.data())); + + EnvelopedAspect envelopedDataContractStatus = new EnvelopedAspect(); + DataContractStatus status = new DataContractStatus(); + status.setState(DataContractState.PENDING); + status.setCustomProperties(new StringMap(ImmutableMap.of("key", "value"))); + + envelopedDataContractStatus.setValue(new Aspect(status.data())); + entityResponse.setAspects( + new EnvelopedAspectMap( + Collections.singletonMap( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, envelopedDataContractProperties))); + + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, + envelopedDataContractProperties, + Constants.DATA_CONTRACT_STATUS_ASPECT_NAME, + envelopedDataContractStatus))); + + DataContract dataContract = DataContractMapper.mapContract(entityResponse); + assertNotNull(dataContract); + assertEquals(dataContract.getUrn(), urn.toString()); + assertEquals(dataContract.getType(), EntityType.DATA_CONTRACT); + assertEquals( + dataContract.getStatus().getState(), + com.linkedin.datahub.graphql.generated.DataContractState.PENDING); + assertEquals(dataContract.getProperties().getEntityUrn(), datasetUrn.toString()); + assertEquals(dataContract.getProperties().getDataQuality().size(), 2); + assertEquals( + dataContract.getProperties().getDataQuality().get(0).getAssertion().getUrn(), + dataQualityAssertionUrn.toString()); + assertEquals( + 
dataContract.getProperties().getDataQuality().get(1).getAssertion().getUrn(), + dataQualityAssertionUrn2.toString()); + assertEquals(dataContract.getProperties().getFreshness().size(), 1); + assertEquals( + dataContract.getProperties().getFreshness().get(0).getAssertion().getUrn(), + freshnessAssertionUrn.toString()); + assertEquals(dataContract.getProperties().getSchema().size(), 1); + assertEquals( + dataContract.getProperties().getSchema().get(0).getAssertion().getUrn(), + schemaAssertionUrn.toString()); + } + + @Test + public void testMapRequiredFields() throws Exception { + EntityResponse entityResponse = new EntityResponse(); + Urn urn = Urn.createFromString("urn:li:dataContract:1"); + Urn datasetUrn = + Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:snowflake,test,PROD)"); + entityResponse.setUrn(urn); + + EnvelopedAspect envelopedDataContractProperties = new EnvelopedAspect(); + DataContractProperties dataContractProperties = new DataContractProperties(); + dataContractProperties.setEntity(datasetUrn); + envelopedDataContractProperties.setValue(new Aspect(dataContractProperties.data())); + + EnvelopedAspect envelopedDataContractStatus = new EnvelopedAspect(); + DataContractStatus status = new DataContractStatus(); + status.setState(DataContractState.PENDING); + status.setCustomProperties(new StringMap(ImmutableMap.of("key", "value"))); + + envelopedDataContractStatus.setValue(new Aspect(status.data())); + entityResponse.setAspects( + new EnvelopedAspectMap( + Collections.singletonMap( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, envelopedDataContractProperties))); + + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, + envelopedDataContractProperties, + Constants.DATA_CONTRACT_STATUS_ASPECT_NAME, + envelopedDataContractStatus))); + + DataContract dataContract = DataContractMapper.mapContract(entityResponse); + assertNotNull(dataContract); + 
assertEquals(dataContract.getUrn(), urn.toString()); + assertEquals(dataContract.getType(), EntityType.DATA_CONTRACT); + assertEquals( + dataContract.getStatus().getState(), + com.linkedin.datahub.graphql.generated.DataContractState.PENDING); + assertEquals(dataContract.getProperties().getEntityUrn(), datasetUrn.toString()); + assertNull(dataContract.getProperties().getDataQuality()); + assertNull(dataContract.getProperties().getSchema()); + assertNull(dataContract.getProperties().getFreshness()); + } + + @Test + public void testMapNoStatus() throws Exception { + EntityResponse entityResponse = new EntityResponse(); + Urn urn = Urn.createFromString("urn:li:dataContract:1"); + Urn datasetUrn = + Urn.createFromString("urn:li:dataset:(urn:li:dataPlatform:snowflake,test,PROD)"); + entityResponse.setUrn(urn); + + EnvelopedAspect envelopedDataContractProperties = new EnvelopedAspect(); + DataContractProperties dataContractProperties = new DataContractProperties(); + dataContractProperties.setEntity(datasetUrn); + envelopedDataContractProperties.setValue(new Aspect(dataContractProperties.data())); + + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, envelopedDataContractProperties))); + + DataContract dataContract = DataContractMapper.mapContract(entityResponse); + assertNotNull(dataContract); + assertEquals(dataContract.getUrn(), urn.toString()); + assertEquals(dataContract.getType(), EntityType.DATA_CONTRACT); + assertNull(dataContract.getStatus()); + assertEquals(dataContract.getProperties().getEntityUrn(), datasetUrn.toString()); + assertNull(dataContract.getProperties().getDataQuality()); + assertNull(dataContract.getProperties().getSchema()); + assertNull(dataContract.getProperties().getFreshness()); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datacontract/DataContractTypeTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datacontract/DataContractTypeTest.java new file mode 100644 index 00000000000000..241775e5ab48cc --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/datacontract/DataContractTypeTest.java @@ -0,0 +1,152 @@ +package com.linkedin.datahub.graphql.types.datacontract; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.nullable; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datacontract.DataContractProperties; +import com.linkedin.datacontract.DataContractState; +import com.linkedin.datacontract.DataContractStatus; +import com.linkedin.datacontract.DataQualityContract; +import com.linkedin.datacontract.DataQualityContractArray; +import com.linkedin.datacontract.FreshnessContract; +import com.linkedin.datacontract.FreshnessContractArray; +import com.linkedin.datacontract.SchemaContract; +import com.linkedin.datacontract.SchemaContractArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataContract; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.DataContractKey; +import com.linkedin.r2.RemoteInvocationException; +import graphql.execution.DataFetcherResult; +import io.datahubproject.metadata.context.OperationContext; +import 
java.util.HashSet; +import java.util.List; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class DataContractTypeTest { + + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,test,PROD)"); + private static final Urn DATA_QUALITY_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:quality"); + private static final Urn FRESHNESS_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:freshness"); + private static final Urn SCHEMA_ASSERTION_URN = UrnUtils.getUrn("urn:li:assertion:schema"); + private static final String TEST_DATA_CONTRACT_1_URN = "urn:li:dataContract:id-1"; + private static final DataContractKey TEST_DATA_CONTRACT_1_KEY = + new DataContractKey().setId("id-1"); + private static final DataContractProperties TEST_DATA_CONTRACT_1_PROPERTIES = + new DataContractProperties() + .setEntity(TEST_DATASET_URN) + .setDataQuality( + new DataQualityContractArray( + ImmutableList.of( + new DataQualityContract().setAssertion(DATA_QUALITY_ASSERTION_URN)))) + .setFreshness( + new FreshnessContractArray( + ImmutableList.of(new FreshnessContract().setAssertion(FRESHNESS_ASSERTION_URN)))) + .setSchema( + new SchemaContractArray( + ImmutableList.of(new SchemaContract().setAssertion(SCHEMA_ASSERTION_URN)))); + private static final DataContractStatus TEST_DATA_CONTRACT_1_STATUS = + new DataContractStatus().setState(DataContractState.ACTIVE); + + private static final String TEST_DATA_CONTRACT_2_URN = "urn:li:dataContract:id-2"; + + @Test + public void testBatchLoad() throws Exception { + + EntityClient client = Mockito.mock(EntityClient.class); + + Urn dataContractUrn1 = Urn.createFromString(TEST_DATA_CONTRACT_1_URN); + Urn dataContractUrn2 = Urn.createFromString(TEST_DATA_CONTRACT_2_URN); + + Mockito.when( + client.batchGetV2( + any(OperationContext.class), + Mockito.eq(Constants.DATA_CONTRACT_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(dataContractUrn1, dataContractUrn2))), + 
Mockito.eq(DataContractType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + dataContractUrn1, + new EntityResponse() + .setEntityName(Constants.DATA_CONTRACT_ENTITY_NAME) + .setUrn(dataContractUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATA_CONTRACT_KEY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATA_CONTRACT_1_KEY.data())), + Constants.DATA_CONTRACT_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATA_CONTRACT_1_PROPERTIES.data())), + Constants.DATA_CONTRACT_STATUS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATA_CONTRACT_1_STATUS.data()))))))); + + DataContractType type = new DataContractType(client); + + QueryContext mockContext = getMockAllowContext(); + List> result = + type.batchLoad( + ImmutableList.of(TEST_DATA_CONTRACT_1_URN, TEST_DATA_CONTRACT_2_URN), mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(OperationContext.class), + Mockito.eq(Constants.DATA_CONTRACT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(dataContractUrn1, dataContractUrn2)), + Mockito.eq(DataContractType.ASPECTS_TO_FETCH)); + + assertEquals(result.size(), 2); + + DataContract dataContract1 = result.get(0).getData(); + assertEquals(dataContract1.getUrn(), TEST_DATA_CONTRACT_1_URN); + assertEquals(dataContract1.getType(), EntityType.DATA_CONTRACT); + assertEquals(dataContract1.getProperties().getEntityUrn(), TEST_DATASET_URN.toString()); + assertEquals(dataContract1.getProperties().getDataQuality().size(), 1); + assertEquals(dataContract1.getProperties().getSchema().size(), 1); + assertEquals(dataContract1.getProperties().getFreshness().size(), 1); + + // Assert second element is null. 
+ assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + nullable(OperationContext.class), + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet()); + DataContractType type = new DataContractType(mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_DATA_CONTRACT_1_URN, TEST_DATA_CONTRACT_2_URN), context)); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java index 9b6e11fd0b3a45..767ed5c6a14729 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java @@ -1,22 +1,25 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; -import com.linkedin.common.Ownership; -import com.linkedin.common.OwnerArray; -import com.linkedin.common.Owner; -import com.linkedin.common.OwnershipType; +import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; import 
com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; -import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlobalTags; +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.OwnershipType; +import com.linkedin.common.Status; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; -import com.linkedin.common.Status; import com.linkedin.common.url.Url; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; @@ -33,185 +36,181 @@ import com.linkedin.metadata.key.DataPlatformInstanceKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; -import org.mockito.Mockito; -import org.testng.annotations.Test; - +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class DataPlatformInstanceTest { - private static final Urn TEST_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); - - private static final String TEST_DATAPLATFORMINSTANCE_1_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; - - private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY - = new DataPlatformInstanceKey() - .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) - .setInstance("I1"); - - private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES - = new DataPlatformInstanceProperties() - .setDescription("test description") - .setName("Test Data Platform Instance"); - - private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = new Deprecation() - .setDeprecated(true) - .setActor(TEST_ACTOR_URN) - 
.setNote("legacy"); - - private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(TEST_ACTOR_URN)))); - - private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); - - private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - - private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status() - .setRemoved(false); - - private static final String TEST_DATAPLATFORMINSTANCE_2_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; - - @Test - public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - - Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); - Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); - - Map dataPlatformInstance1Aspects = new HashMap<>(); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data())) - ); - dataPlatformInstance1Aspects.put( - 
Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), - Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - dataPlatformInstance1Urn, - new EntityResponse() - .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) - .setUrn(dataPlatformInstance1Urn) - .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); - - DataPlatformInstanceType type = new DataPlatformInstanceType(client); - - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); - List> result = type.batchLoad( - ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), mockContext); - - // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( + private static final Urn TEST_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); + + private static final String TEST_DATAPLATFORMINSTANCE_1_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; + + private static final DataPlatformInstanceKey 
TEST_DATAPLATFORMINSTANCE_1_KEY = + new DataPlatformInstanceKey() + .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) + .setInstance("I1"); + + private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES = + new DataPlatformInstanceProperties() + .setDescription("test description") + .setName("Test Data Platform Instance"); + + private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = + new Deprecation().setDeprecated(true).setActor(TEST_ACTOR_URN).setNote("legacy"); + + private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setType(OwnershipType.DATAOWNER).setOwner(TEST_ACTOR_URN)))); + + private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); + + private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + + private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status().setRemoved(false); + + private static final String TEST_DATAPLATFORMINSTANCE_2_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; + + @Test + public void testBatchLoad() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + + Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); + Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); + + Map dataPlatformInstance1Aspects = new HashMap<>(); + dataPlatformInstance1Aspects.put( + 
Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data()))); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data()))); + dataPlatformInstance1Aspects.put( + Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data()))); + dataPlatformInstance1Aspects.put( + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data()))); + dataPlatformInstance1Aspects.put( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data()))); + dataPlatformInstance1Aspects.put( + Constants.GLOBAL_TAGS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data()))); + dataPlatformInstance1Aspects.put( + Constants.STATUS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data()))); + Mockito.when( + client.batchGetV2( + any(), Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), - Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); - - assertEquals(result.size(), 2); - - DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); - assertEquals( - dataPlatformInstance1.getUrn(), - TEST_DATAPLATFORMINSTANCE_1_URN - ); - assertEquals( - dataPlatformInstance1.getType(), - EntityType.DATA_PLATFORM_INSTANCE - ); - assertEquals( - dataPlatformInstance1.getProperties().getDescription(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription() - ); - assertEquals( - dataPlatformInstance1.getProperties().getName(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName() - 
); - assertEquals( - dataPlatformInstance1.getDeprecation().getDeprecated(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getNote(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getActor(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString() - ); - assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); - assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); - assertEquals( - dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), - TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString() - ); - assertEquals( - dataPlatformInstance1.getStatus().getRemoved(), - TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue() - ); - - // Assert second element is null. - assertNull(result.get(1)); - } - - @Test - public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type - = new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType(mockClient); - - // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of( - TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), context)); - } + Mockito.eq( + new HashSet<>( + ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), + Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + 
dataPlatformInstance1Urn, + new EntityResponse() + .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) + .setUrn(dataPlatformInstance1Urn) + .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); + + DataPlatformInstanceType type = new DataPlatformInstanceType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.userContextNoSearchAuthorization(TEST_ACTOR_URN)); + + List> result = + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH)); + + assertEquals(result.size(), 2); + + DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); + assertEquals(dataPlatformInstance1.getUrn(), TEST_DATAPLATFORMINSTANCE_1_URN); + assertEquals(dataPlatformInstance1.getType(), EntityType.DATA_PLATFORM_INSTANCE); + assertEquals( + dataPlatformInstance1.getProperties().getDescription(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription()); + assertEquals( + dataPlatformInstance1.getProperties().getName(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName()); + assertEquals( + dataPlatformInstance1.getDeprecation().getDeprecated(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue()); + assertEquals( + dataPlatformInstance1.getDeprecation().getNote(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote()); + assertEquals( + dataPlatformInstance1.getDeprecation().getActor(), + 
TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString()); + assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); + assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); + assertEquals( + dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), + TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString()); + assertEquals( + dataPlatformInstance1.getStatus().getRemoved(), + TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue()); + + // Assert second element is null. + assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); + com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type = + new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType( + mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + context)); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index 3d22f1c429fd60..8bfbdbe282ad65 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -2,152 +2,185 @@ import com.linkedin.common.TimeStamp; import 
com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetProperties; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.Map; +import org.testng.Assert; +import org.testng.annotations.Test; public class DatasetMapperTest { - private static final Urn TEST_DATASET_URN = Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); - private static final Urn TEST_CREATED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); - private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); - - @Test - public void testDatasetPropertiesMapperWithCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - input.setQualifiedName("Test QualifiedName"); - - final TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - final TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - 
final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setQualifiedName("Test QualifiedName"); - expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); - expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); - expectedDatasetProperties.setLastModified(20L); - expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - Assert.assertEquals(actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final 
Dataset actual = DatasetMapper.map(response); - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(null); - expectedDatasetProperties.setCreated(null); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutTimestampActors() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - 
final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(20L); - expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } + private static final Urn TEST_DATASET_URN = + Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); + private static final Urn TEST_CREATED_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); + private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); + + @Test + public void testDatasetPropertiesMapperWithCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + input.setQualifiedName("Test QualifiedName"); + + final TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); + createdTimestamp.setTime(10L); + input.setCreated(createdTimestamp); + + final TimeStamp lastModifiedTimestamp = new TimeStamp(); + 
lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(null, response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setQualifiedName("Test QualifiedName"); + expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); + expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); + expectedDatasetProperties.setLastModified( + new AuditStamp(20L, TEST_LAST_MODIFIED_ACTOR_URN.toString())); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + Assert.assertEquals( + actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); + + Assert.assertEquals( + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), 
+ expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(null, response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(new AuditStamp(0L, null)); + expectedDatasetProperties.setCreated(null); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); 
+ Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutTimestampActors() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setTime(10L); + input.setCreated(createdTimestamp); + + TimeStamp lastModifiedTimestamp = new TimeStamp(); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(null, response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(new AuditStamp(20L, null)); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); + 
Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java index 78cdaa0a276da0..42220091f5853b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java @@ -19,90 +19,128 @@ public void testMapperFullProfile() { input.setRowCount(10L); input.setColumnCount(45L); input.setSizeInBytes(15L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setMax("1") - .setMean("2") - .setStdev("3") - .setMedian("4") - .setMin("5") - .setNullCount(20L) - .setNullProportion(20.5f) - .setUniqueCount(30L) - .setUniqueProportion(30.5f) - .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setNullCount(30L) - .setNullProportion(30.5f) - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - .setSampleValues(new StringArray(ImmutableList.of("val3", "val4"))) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); - final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + 
.setFieldPath("/field1") + .setMax("1") + .setMean("2") + .setStdev("3") + .setMedian("4") + .setMin("5") + .setNullCount(20L) + .setNullProportion(20.5f) + .setUniqueCount(30L) + .setUniqueProportion(30.5f) + .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setNullCount(30L) + .setNullProportion(30.5f) + .setUniqueCount(40L) + .setUniqueProportion(40.5f) + .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); + final DatasetProfile actual = DatasetProfileMapper.map(null, inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); expected.setSizeInBytes(15L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - 20L, - 20.5f, - "5", - "1", - "2", - "4", - "3", - new ArrayList<>(ImmutableList.of("val1", "val2"))), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - 30L, - 30.5f, - "6", - "2", - "3", - "5", - "4", - new ArrayList<>(ImmutableList.of("val3", "val4"))) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", + 30L, + 30.5f, + 20L, + 20.5f, + "5", + "1", + "2", + "4", + "3", + new ArrayList<>(ImmutableList.of("val1", "val2"))), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", + 40L, + 40.5f, + 30L, + 30.5f, + "6", + "2", + "3", + "5", + "4", + new ArrayList<>(ImmutableList.of("val3", "val4")))))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); 
Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + 
expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + 
actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } @Test @@ -111,77 +149,95 @@ public void testMapperPartialProfile() { input.setTimestampMillis(1L); input.setRowCount(10L); input.setColumnCount(45L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setUniqueCount(30L) - .setUniqueProportion(30.5f), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); - final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); + input.setFieldProfiles( + new 
DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setUniqueCount(30L) + .setUniqueProportion(30.5f), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setUniqueCount(40L) + .setUniqueProportion(40.5f)))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); + final DatasetProfile actual = DatasetProfileMapper.map(null, inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - null, - null, - null, - null, - null, - null, - null, - null), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - null, - null, - "6", - "2", - "3", - "5", - "4", - null) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", 30L, 30.5f, null, null, null, null, null, null, null, null), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", 40L, 40.5f, null, null, "6", "2", "3", "5", "4", null)))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - 
Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + 
Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + 
Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java index 48c23f436f875e..d7463f1da2f81b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -30,33 +34,34 @@ import java.util.HashSet; import java.util.List; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class DomainTypeTest { private static final String TEST_DOMAIN_1_URN = "urn:li:domain:id-1"; - private static final DomainKey TEST_DOMAIN_1_KEY = new 
DomainKey() - .setId("id-1"); - private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = new DomainProperties() - .setDescription("test description") - .setName("Test Domain"); - private static final Ownership TEST_DOMAIN_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey().setId("id-1"); + private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = + new DomainProperties().setDescription("test description").setName("Test Domain"); + private static final Ownership TEST_DOMAIN_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); private static final String TEST_DOMAIN_2_URN = "urn:li:domain:id-2"; @@ -68,39 +73,48 @@ public void testBatchLoad() throws Exception { Urn domainUrn1 = Urn.createFromString(TEST_DOMAIN_1_URN); Urn domainUrn2 = Urn.createFromString(TEST_DOMAIN_2_URN); - Mockito.when(client.batchGetV2( - 
Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - domainUrn1, - new EntityResponse() - .setEntityName(Constants.DOMAIN_ENTITY_NAME) - .setUrn(domainUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data())) - ))))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), + Mockito.eq(DomainType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + domainUrn1, + new EntityResponse() + .setEntityName(Constants.DOMAIN_ENTITY_NAME) + .setUrn(domainUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data()))))))); DomainType type = new DomainType(client); QueryContext mockContext = getMockAllowContext(); - List> result = type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); + List> result = + 
type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), + Mockito.eq(DomainType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 2); @@ -120,17 +134,16 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); DomainType type = new DomainType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java new file mode 100644 index 00000000000000..0e2b78a9368ee4 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java @@ 
-0,0 +1,117 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + +import com.google.common.collect.ImmutableMap; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.GlobalTags; +import com.linkedin.common.TagAssociationArray; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.TagUrn; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import java.util.Collections; +import org.testng.annotations.Test; + +public class IncidentMapperTest { + + @Test + public void testMap() throws Exception { + EntityResponse entityResponse = new EntityResponse(); + Urn urn = Urn.createFromString("urn:li:incident:1"); + Urn userUrn = Urn.createFromString("urn:li:corpuser:test"); + Urn assertionUrn = Urn.createFromString("urn:li:assertion:test"); + entityResponse.setUrn(urn); + + EnvelopedAspect envelopedIncidentInfo = new EnvelopedAspect(); + IncidentInfo incidentInfo = new IncidentInfo(); + incidentInfo.setType(IncidentType.OPERATIONAL); + incidentInfo.setCustomType("Custom Type"); + incidentInfo.setTitle("Test Incident", SetMode.IGNORE_NULL); + incidentInfo.setDescription("This is a test incident", SetMode.IGNORE_NULL); + incidentInfo.setPriority(1, SetMode.IGNORE_NULL); + incidentInfo.setEntities(new 
UrnArray(Collections.singletonList(urn))); + + IncidentSource source = new IncidentSource(); + source.setType(IncidentSourceType.MANUAL); + source.setSourceUrn(assertionUrn); + incidentInfo.setSource(source); + + AuditStamp lastStatus = new AuditStamp(); + lastStatus.setTime(1000L); + lastStatus.setActor(userUrn); + incidentInfo.setCreated(lastStatus); + + IncidentStatus status = new IncidentStatus(); + status.setState(IncidentState.ACTIVE); + status.setLastUpdated(lastStatus); + status.setMessage("This incident is open.", SetMode.IGNORE_NULL); + incidentInfo.setStatus(status); + + AuditStamp created = new AuditStamp(); + created.setTime(1000L); + created.setActor(userUrn); + incidentInfo.setCreated(created); + + envelopedIncidentInfo.setValue(new Aspect(incidentInfo.data())); + + EnvelopedAspect envelopedTagsAspect = new EnvelopedAspect(); + GlobalTags tags = new GlobalTags(); + tags.setTags( + new TagAssociationArray( + new TagAssociationArray( + Collections.singletonList( + new com.linkedin.common.TagAssociation() + .setTag(TagUrn.createFromString("urn:li:tag:test")))))); + envelopedTagsAspect.setValue(new Aspect(tags.data())); + + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INCIDENT_INFO_ASPECT_NAME, envelopedIncidentInfo, + Constants.GLOBAL_TAGS_ASPECT_NAME, envelopedTagsAspect))); + + Incident incident = IncidentMapper.map(null, entityResponse); + + assertNotNull(incident); + assertEquals(incident.getUrn(), "urn:li:incident:1"); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getCustomType(), "Custom Type"); + assertEquals( + incident.getIncidentType().toString(), + com.linkedin.datahub.graphql.generated.IncidentType.OPERATIONAL.toString()); + assertEquals(incident.getTitle(), "Test Incident"); + assertEquals(incident.getDescription(), "This is a test incident"); + assertEquals(incident.getPriority().intValue(), 1); + assertEquals( + incident.getSource().getType().toString(), + 
com.linkedin.datahub.graphql.generated.IncidentSourceType.MANUAL.toString()); + assertEquals(incident.getSource().getSource().getUrn(), assertionUrn.toString()); + assertEquals( + incident.getStatus().getState().toString(), + com.linkedin.datahub.graphql.generated.IncidentState.ACTIVE.toString()); + assertEquals(incident.getStatus().getMessage(), "This incident is open."); + assertEquals(incident.getStatus().getLastUpdated().getTime().longValue(), 1000L); + assertEquals(incident.getStatus().getLastUpdated().getActor(), userUrn.toString()); + assertEquals(incident.getCreated().getTime().longValue(), 1000L); + assertEquals(incident.getCreated().getActor(), userUrn.toString()); + + assertEquals(incident.getTags().getTags().size(), 1); + assertEquals( + incident.getTags().getTags().get(0).getTag().getUrn().toString(), "urn:li:tag:test"); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java new file mode 100644 index 00000000000000..abe2e3034f8d95 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java @@ -0,0 +1,175 @@ +package com.linkedin.datahub.graphql.types.incident; + +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Incident; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import 
com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.incident.IncidentInfo; +import com.linkedin.incident.IncidentSource; +import com.linkedin.incident.IncidentSourceType; +import com.linkedin.incident.IncidentState; +import com.linkedin.incident.IncidentStatus; +import com.linkedin.incident.IncidentType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.r2.RemoteInvocationException; +import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import org.mockito.Mockito; +import org.testng.annotations.Test; + +public class IncidentTypeTest { + + private static final String TEST_INCIDENT_URN = "urn:li:incident:guid-1"; + private static Urn testAssertionUrn; + private static Urn testUserUrn; + private static Urn testDatasetUrn; + + static { + try { + testAssertionUrn = Urn.createFromString("urn:li:assertion:test"); + testUserUrn = Urn.createFromString("urn:li:corpuser:test"); + testDatasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); + } catch (Exception ignored) { + // ignored + } + } + + private static final IncidentKey TEST_INCIDENT_KEY = new IncidentKey().setId("guid-1"); + private static final IncidentInfo TEST_INCIDENT_INFO = + new IncidentInfo() + .setType(IncidentType.OPERATIONAL) + .setCustomType("Custom Type") + .setDescription("Description") + .setPriority(5) + .setTitle("Title") + .setEntities(new UrnArray(ImmutableList.of(testDatasetUrn))) + .setSource( + new IncidentSource() + .setType(IncidentSourceType.MANUAL) + .setSourceUrn(testAssertionUrn)) + .setStatus( + new IncidentStatus() + .setState(IncidentState.ACTIVE) + .setMessage("Message") + .setLastUpdated(new AuditStamp().setTime(1L).setActor(testUserUrn))) + 
.setCreated(new AuditStamp().setTime(0L).setActor(testUserUrn)); + private static final String TEST_INCIDENT_URN_2 = "urn:li:incident:guid-2"; + + @Test + public void testBatchLoad() throws Exception { + + EntityClient client = Mockito.mock(EntityClient.class); + + Urn incidentUrn1 = Urn.createFromString(TEST_INCIDENT_URN); + Urn incidentUrn2 = Urn.createFromString(TEST_INCIDENT_URN_2); + + Map incident1Aspects = new HashMap<>(); + incident1Aspects.put( + Constants.INCIDENT_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_INCIDENT_KEY.data()))); + incident1Aspects.put( + Constants.INCIDENT_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_INCIDENT_INFO.data()))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(incidentUrn1, incidentUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.incident.IncidentType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + incidentUrn1, + new EntityResponse() + .setEntityName(Constants.INCIDENT_ENTITY_NAME) + .setUrn(incidentUrn1) + .setAspects(new EnvelopedAspectMap(incident1Aspects)))); + + com.linkedin.datahub.graphql.types.incident.IncidentType type = + new com.linkedin.datahub.graphql.types.incident.IncidentType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.INCIDENT_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(incidentUrn1, incidentUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.incident.IncidentType.ASPECTS_TO_FETCH)); + + 
assertEquals(result.size(), 2); + + Incident incident = result.get(0).getData(); + assertEquals(incident.getUrn(), TEST_INCIDENT_URN.toString()); + assertEquals(incident.getType(), EntityType.INCIDENT); + assertEquals(incident.getIncidentType().toString(), TEST_INCIDENT_INFO.getType().toString()); + assertEquals(incident.getTitle(), TEST_INCIDENT_INFO.getTitle()); + assertEquals(incident.getDescription(), TEST_INCIDENT_INFO.getDescription()); + assertEquals(incident.getCustomType(), TEST_INCIDENT_INFO.getCustomType()); + assertEquals( + incident.getStatus().getState().toString(), + TEST_INCIDENT_INFO.getStatus().getState().toString()); + assertEquals(incident.getStatus().getMessage(), TEST_INCIDENT_INFO.getStatus().getMessage()); + assertEquals( + incident.getStatus().getLastUpdated().getTime(), + TEST_INCIDENT_INFO.getStatus().getLastUpdated().getTime()); + assertEquals( + incident.getStatus().getLastUpdated().getActor(), + TEST_INCIDENT_INFO.getStatus().getLastUpdated().getActor().toString()); + assertEquals( + incident.getSource().getType().toString(), + TEST_INCIDENT_INFO.getSource().getType().toString()); + assertEquals( + incident.getSource().getSource().getUrn(), + TEST_INCIDENT_INFO.getSource().getSourceUrn().toString()); + assertEquals( + incident.getCreated().getActor(), TEST_INCIDENT_INFO.getCreated().getActor().toString()); + assertEquals(incident.getCreated().getTime(), TEST_INCIDENT_INFO.getCreated().getTime()); + + // Assert second element is null. 
+ assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); + com.linkedin.datahub.graphql.types.incident.IncidentType type = + new com.linkedin.datahub.graphql.types.incident.IncidentType(mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), context)); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java new file mode 100644 index 00000000000000..6d3291736f571b --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java @@ -0,0 +1,73 @@ +package com.linkedin.datahub.graphql.types.mappers; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.MatchedField; +import com.linkedin.metadata.entity.validation.ValidationApiUtils; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.snapshot.Snapshot; +import io.datahubproject.test.metadata.context.TestOperationContexts; +import 
java.net.URISyntaxException; +import java.util.List; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class MapperUtilsTest { + private EntityRegistry entityRegistry; + + @BeforeTest + public void setup() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + } + + @Test + public void testMatchedFieldValidation() throws URISyntaxException { + final Urn urn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:s3,urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29,PROD)"); + final Urn invalidUrn = + Urn.createFromString( + "urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29"); + assertThrows( + IllegalArgumentException.class, + () -> ValidationApiUtils.validateUrn(entityRegistry, invalidUrn)); + + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization(entityRegistry)); + + List actualMatched = + MapperUtils.getMatchedFieldEntry( + mockContext, + List.of( + buildSearchMatchField(urn.toString()), + buildSearchMatchField(invalidUrn.toString()))); + + assertEquals(actualMatched.size(), 2, "Matched fields should be 2"); + assertEquals( + actualMatched.stream().filter(matchedField -> matchedField.getEntity() != null).count(), + 1, + "With urn should be 1"); + } + + private static com.linkedin.metadata.search.MatchedField buildSearchMatchField( + String highlightValue) { + com.linkedin.metadata.search.MatchedField field = + new com.linkedin.metadata.search.MatchedField(); + field.setName("testField"); + field.setValue(highlightValue); + return field; + } +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index 918616a2705b7e..f0d2a4f6f7f777 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -31,16 +34,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; -import com.linkedin.notebook.NotebookCell; -import com.linkedin.notebook.NotebookCellArray; -import com.linkedin.notebook.NotebookCellType; -import com.linkedin.notebook.NotebookContent; -import com.linkedin.notebook.NotebookInfo; -import com.linkedin.notebook.EditableNotebookProperties; -import com.linkedin.notebook.TextCell; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.types.container.ContainerType; import com.linkedin.domain.Domains; import com.linkedin.entity.Aspect; @@ -50,8 +46,16 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.notebook.EditableNotebookProperties; +import com.linkedin.notebook.NotebookCell; +import com.linkedin.notebook.NotebookCellArray; +import com.linkedin.notebook.NotebookCellType; +import com.linkedin.notebook.NotebookContent; 
+import com.linkedin.notebook.NotebookInfo; +import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -60,58 +64,75 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class NotebookTypeTest { private static final String TEST_NOTEBOOK = "urn:li:notebook:(querybook,123)"; - private static final NotebookKey NOTEBOOK_KEY = new NotebookKey() - .setNotebookId("123") - .setNotebookTool("querybook"); - private static final NotebookContent NOTEBOOK_CONTENT = new NotebookContent() - .setCells(new NotebookCellArray(ImmutableList.of(new NotebookCell() - .setType(NotebookCellType.TEXT_CELL) - .setTextCell(new TextCell() - .setCellId("1234") - .setCellTitle("test cell") - .setText("test text") - .setChangeAuditStamps(new ChangeAuditStamps()))))); - private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = new EditableNotebookProperties() - .setDescription("test editable description"); - private static final Ownership OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - - private static final SubTypes SUB_TYPES = new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); - - private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = new 
DataPlatformInstance() - .setPlatform(new DataPlatformUrn("test_platform")); - - private static final NotebookInfo NOTEBOOK_INFO = new NotebookInfo() - .setTitle("title") - .setExternalUrl(new Url("https://querybook.com/notebook/123")) - .setChangeAuditStamps(new ChangeAuditStamps()) - .setDescription("test doc"); - - private static final Status STATUS = new Status() - .setRemoved(false); - - private static final Domains DOMAINS = new Domains() - .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); - private static final GlobalTags GLOBAL_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final NotebookKey NOTEBOOK_KEY = + new NotebookKey().setNotebookId("123").setNotebookTool("querybook"); + private static final NotebookContent NOTEBOOK_CONTENT = + new NotebookContent() + .setCells( + new NotebookCellArray( + ImmutableList.of( + new NotebookCell() + .setType(NotebookCellType.TEXT_CELL) + .setTextCell( + new TextCell() + .setCellId("1234") + .setCellTitle("test cell") + .setText("test text") + .setChangeAuditStamps(new ChangeAuditStamps()))))); + private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = + new EditableNotebookProperties().setDescription("test editable description"); + private static final Ownership OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new 
Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + + private static final SubTypes SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); + + private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(new DataPlatformUrn("test_platform")); + + private static final NotebookInfo NOTEBOOK_INFO = + new NotebookInfo() + .setTitle("title") + .setExternalUrl(new Url("https://querybook.com/notebook/123")) + .setChangeAuditStamps(new ChangeAuditStamps()) + .setDescription("test doc"); + + private static final Status STATUS = new Status().setRemoved(false); + + private static final Domains DOMAINS = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); + private static final GlobalTags GLOBAL_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); @Test public void testBatchLoad() throws Exception { @@ -121,79 +142,72 @@ public void testBatchLoad() throws Exception { Map notebookAspects = new HashMap<>(); notebookAspects.put( Constants.NOTEBOOK_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data()))); notebookAspects.put( Constants.NOTEBOOK_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data()))); notebookAspects.put( Constants.NOTEBOOK_CONTENT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data())) - 
); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data()))); notebookAspects.put( Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data()))); notebookAspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data()))); notebookAspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data()))); notebookAspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(STATUS.data())) - ); + Constants.STATUS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(STATUS.data()))); notebookAspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data()))); notebookAspects.put( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(DOMAINS.data())) - ); + Constants.DOMAINS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DOMAINS.data()))); notebookAspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data()))); notebookAspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data())) - ); - notebookAspects.put(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data()))); + notebookAspects.put( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DATA_PLATFORM_INSTANCE.data()))); Urn notebookUrn = new NotebookUrn("querybook", 
"123"); Urn dummyNotebookUrn = new NotebookUrn("querybook", "dummy"); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - notebookUrn, - new EntityResponse() - .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) - .setUrn(notebookUrn) - .setAspects(new EnvelopedAspectMap(notebookAspects)))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE))) + .thenReturn( + ImmutableMap.of( + notebookUrn, + new EntityResponse() + .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) + .setUrn(notebookUrn) + .setAspects(new EnvelopedAspectMap(notebookAspects)))); NotebookType type = new NotebookType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> - result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE)); assertEquals(result.size(), 
2); @@ -201,13 +215,17 @@ public void testBatchLoad() throws Exception { Notebook notebook = result.get(0).getData(); assertEquals(notebook.getContent().getCells().size(), NOTEBOOK_CONTENT.getCells().size()); - assertEquals(notebook.getContent().getCells().get(0).getType().toString(), + assertEquals( + notebook.getContent().getCells().get(0).getType().toString(), NOTEBOOK_CONTENT.getCells().get(0).getType().toString()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellId(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellId(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellId()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellTitle()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getText(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getText(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getText()); assertEquals(notebook.getInfo().getDescription(), NOTEBOOK_INFO.getDescription()); assertEquals(notebook.getInfo().getExternalUrl(), NOTEBOOK_INFO.getExternalUrl().toString()); @@ -217,11 +235,17 @@ public void testBatchLoad() throws Exception { assertEquals(notebook.getType(), EntityType.NOTEBOOK); assertEquals(notebook.getOwnership().getOwners().size(), 1); assertEquals(notebook.getInstitutionalMemory().getElements().size(), 1); - assertEquals(notebook.getEditableProperties().getDescription(), TEST_EDITABLE_DESCRIPTION.getDescription()); - assertEquals(notebook.getTags().getTags().get(0).getTag().getUrn(), + assertEquals( + notebook.getEditableProperties().getDescription(), + TEST_EDITABLE_DESCRIPTION.getDescription()); + assertEquals( + notebook.getTags().getTags().get(0).getTag().getUrn(), GLOBAL_TAGS.getTags().get(0).getTag().toString()); - 
assertEquals(notebook.getSubTypes().getTypeNames(), SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); - assertEquals(notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), + assertEquals( + notebook.getSubTypes().getTypeNames(), + SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); + assertEquals( + notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); assertEquals(notebook.getPlatform().getUrn(), DATA_PLATFORM_INSTANCE.getPlatform().toString()); @@ -232,17 +256,15 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); ContainerType type = new ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), - context)); + assertThrows( + RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index a3c089b91de875..f03ddb192e242e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,82 +1,83 @@ 
package com.linkedin.datahub.graphql.types.query; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.datahub.graphql.generated.QueryEntity; -import com.linkedin.query.QueryLanguage; -import com.linkedin.query.QueryProperties; -import com.linkedin.query.QuerySource; -import com.linkedin.query.QueryStatement; -import com.linkedin.query.QuerySubject; -import com.linkedin.query.QuerySubjectArray; -import com.linkedin.query.QuerySubjects; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryProperties; +import com.linkedin.query.QuerySource; +import com.linkedin.query.QueryStatement; +import com.linkedin.query.QuerySubject; +import com.linkedin.query.QuerySubjectArray; +import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class QueryTypeTest { private static final Urn 
TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); private static final Urn TEST_QUERY_2_URN = UrnUtils.getUrn("urn:li:query:test-2"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); - private static final Urn TEST_DATASET_2_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_2_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final QueryProperties TEST_QUERY_PROPERTIES_1 = new QueryProperties() - .setName("Query Name") - .setDescription("Query Description") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ) - )); - private static final QueryProperties TEST_QUERY_PROPERTIES_2 = new QueryProperties() - .setName("Query Name 2") - .setDescription("Query Description 2") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable2") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_2_URN) - ) - )); + 
private static final QueryProperties TEST_QUERY_PROPERTIES_1 = + new QueryProperties() + .setName("Query Name") + .setDescription("Query Description") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + private static final QueryProperties TEST_QUERY_PROPERTIES_2 = + new QueryProperties() + .setName("Query Name 2") + .setDescription("Query Description 2") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable2")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_2_URN)))); @Test public void testBatchLoad() throws Exception { @@ -87,39 +88,58 @@ public void testBatchLoad() throws Exception { Urn queryUrn2 = TEST_QUERY_2_URN; Map query1Aspects = new HashMap<>(); - query1Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); - query1Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); Map query2Aspects = new HashMap<>(); - query2Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query2Aspects.put( + 
Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_2.data()))); - query2Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_2.data()))); - Mockito.when(client.batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(queryUrn1, new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)), queryUrn2, - new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn2) - .setAspects(new EnvelopedAspectMap(query2Aspects)))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new EnvelopedAspectMap(query1Aspects)), + queryUrn2, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn2) + .setAspects(new EnvelopedAspectMap(query2Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + List> result = - type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); + type.batchLoad( + 
ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response Mockito.verify(client, Mockito.times(1)) - .batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), Mockito.eq(QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class)); + .batchGetV2( + any(), + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 2); @@ -141,40 +161,42 @@ public void testBatchLoadNullEntity() throws Exception { Map query1Aspects = new HashMap<>(); query1Aspects.put( Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); query1Aspects.put( Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - queryUrn1, - new EntityResponse() - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new EnvelopedAspectMap(query1Aspects)))); QueryType type = new 
QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of( - TEST_QUERY_URN.toString(), - TEST_QUERY_2_URN.toString()), - mockContext); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), - Mockito.eq(QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 2); @@ -188,18 +210,19 @@ public void testBatchLoadNullEntity() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); QueryType type = new QueryType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), - context)); + assertThrows( + RuntimeException.class, + () -> + 
type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), context)); } private void verifyQuery1(QueryEntity query) { @@ -207,14 +230,30 @@ private void verifyQuery1(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_1.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_1.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_1.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_1.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_1.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_1.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_1.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); + assertEquals( + 
query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_1.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); } private void verifyQuery2(QueryEntity query) { @@ -222,13 +261,29 @@ private void verifyQuery2(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_2.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_2.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_2.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_2.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_2.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_2.getSource().toString()); + assertEquals( + 
query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_2.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_2.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java index 7f3c8f99f6593a..685cccf27ccc03 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java @@ -1,114 +1,151 @@ package com.linkedin.datahub.graphql.types.view; +import static org.mockito.ArgumentMatchers.any; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import 
com.linkedin.datahub.graphql.generated.DataHubView; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.DataHubView; -import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.entity.Aspect; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; -import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class DataHubViewTypeTest { private static final String TEST_VIEW_URN = "urn:li:dataHubView:test"; private static final Urn 
TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); + /** * A Valid View is one which is minted by the createView or updateView GraphQL resolvers. * - * View Definitions currently support a limited Filter structure, which includes a single Logical filter set. - * Either a set of OR criteria with 1 value in each nested "and", or a single OR criteria with a set of nested ANDs. + *

View Definitions currently support a limited Filter structure, which includes a single + * Logical filter set. Either a set of OR criteria with 1 value in each nested "and", or a single + * OR criteria with a set of nested ANDs. * - * This enables us to easily support merging more complex View predicates in the future without a data migration, - * should the need arise. + *

This enables us to easily support merging more complex View predicates in the future without + * a data migration, should the need arise. */ - private static final DataHubViewInfo TEST_VALID_VIEW_INFO = new DataHubViewInfo() - .setType(DataHubViewType.PERSONAL) - .setName("test") - .setDescription("test description") - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setDefinition(new DataHubViewDefinition() - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test") - .setCondition(Condition.EQUAL) - ))) - ))) - ) - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); + private static final DataHubViewInfo TEST_VALID_VIEW_INFO = + new DataHubViewInfo() + .setType(DataHubViewType.PERSONAL) + .setName("test") + .setDescription("test description") + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test") + .setCondition(Condition.EQUAL)))))))) + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); /** - * An Invalid View is on which has been ingested manually, which should not occur under normal operation of DataHub. 
+ * An Invalid View is one which has been ingested manually, which should not occur under normal + * operation of DataHub. * - * This would be a complex view with multiple OR and nested AND predicates. + *

This would be a complex view with multiple OR and nested AND predicates. */ - private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = new DataHubViewInfo() - .setType(DataHubViewType.PERSONAL) - .setName("test") - .setDescription("test description") - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setDefinition(new DataHubViewDefinition() - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))), - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))) - ))) - ) - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); + private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = + new DataHubViewInfo() + .setType(DataHubViewType.PERSONAL) + .setName("test") + .setDescription("test description") + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + 
"value1", "value2"))) + .setField("test") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))))))) + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); private static final String TEST_VIEW_URN_2 = "urn:li:dataHubView:test2"; @@ -123,33 +160,40 @@ public void testBatchLoadValidView() throws Exception { Map view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - viewUrn1, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(viewUrn1) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data()))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), + Mockito.eq( + 
com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + viewUrn1, + new EntityResponse() + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setUrn(viewUrn1) + .setAspects(new EnvelopedAspectMap(view1Aspects)))); + + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 2); @@ -164,9 +208,12 @@ public void testBatchLoadValidView() throws Exception { assertEquals(view.getDefinition().getEntityTypes().get(1), EntityType.DASHBOARD); assertEquals(view.getDefinition().getFilter().getOperator(), LogicalOperator.AND); assertEquals(view.getDefinition().getFilter().getFilters().size(), 1); - assertEquals(view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL); + assertEquals( + view.getDefinition().getFilter().getFilters().get(0).getCondition(), 
FilterOperator.EQUAL); assertEquals(view.getDefinition().getFilter().getFilters().get(0).getField(), "test"); - assertEquals(view.getDefinition().getFilter().getFilters().get(0).getValues(), ImmutableList.of("value1", "value2")); + assertEquals( + view.getDefinition().getFilter().getFilters().get(0).getValues(), + ImmutableList.of("value1", "value2")); // Assert second element is null. assertNull(result.get(1)); @@ -174,40 +221,48 @@ public void testBatchLoadValidView() throws Exception { @Test public void testBatchLoadInvalidView() throws Exception { - // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log a warning). + // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log + // a warning). EntityClient client = Mockito.mock(EntityClient.class); Urn invalidViewUrn = Urn.createFromString(TEST_VIEW_URN); Map view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - invalidViewUrn, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(invalidViewUrn) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data()))); + Mockito.when( + client.batchGetV2( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))), + Mockito.eq( + 
com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH))) + .thenReturn( + ImmutableMap.of( + invalidViewUrn, + new EntityResponse() + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setUrn(invalidViewUrn) + .setAspects(new EnvelopedAspectMap(view1Aspects)))); + + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization()); + + List> result = + type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(invalidViewUrn)), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + any(), + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(invalidViewUrn)), + Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH)); assertEquals(result.size(), 1); @@ -227,17 +282,17 @@ public void testBatchLoadInvalidView() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient); + 
Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2(any(), Mockito.anyString(), Mockito.anySet(), Mockito.anySet()); + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java index 0a58ff88586c6d..6ecbc8d015b29a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java @@ -1,57 +1,50 @@ package com.linkedin.datahub.graphql.utils; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.datahub.graphql.util.DateUtil; import org.joda.time.DateTime; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - public class DateUtilTest { - private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { - DateTime result = new DateTime() - .withDate(2023, 1, dayOfMonth); - if (zeroTime) { - return new DateUtil().setTimeToZero(result); - } - return result - .withHourOfDay(1) - .withMinuteOfHour(2) - .withSecondOfMinute(3) - .withMillisOfSecond(4); + private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { + DateTime result = new DateTime().withDate(2023, 1, dayOfMonth); + if (zeroTime) { + return new 
DateUtil().setTimeToZero(result); } + return result.withHourOfDay(1).withMinuteOfHour(2).withSecondOfMinute(3).withMillisOfSecond(4); + } - private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { - assertEquals( - setTimeParts(dayOfMonth, true).getMillis(), - dateUtil.getStartOfNextWeek().getMillis() - ); - } + private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { + assertEquals( + setTimeParts(dayOfMonth, true).getMillis(), dateUtil.getStartOfNextWeek().getMillis()); + } - @Test - public void testStartOfNextWeek() { - DateUtil dateUtil = Mockito.spy(DateUtil.class); + @Test + public void testStartOfNextWeek() { + DateUtil dateUtil = Mockito.spy(DateUtil.class); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + 
Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); - assertEqualStartOfNextWeek(dateUtil, 9); - } + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); + assertEqualStartOfNextWeek(dateUtil, 9); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java index 48ce2ddb6dde4d..0419fe0b5254d3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.*; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.identity.CorpUserInfo; @@ -7,19 +10,24 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.AssertJUnit.*; - - public class MutationsUtilsTest { @Test public void testBuildMetadataChangeProposal() { - MetadataChangeProposal metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn("urn:li:corpuser:datahub"), CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); - metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithKey(new CorpUserKey().setUsername("datahub"), - CORP_USER_ENTITY_NAME, CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, 
metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + MetadataChangeProposal metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn("urn:li:corpuser:datahub"), + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithKey( + new CorpUserKey().setUsername("datahub"), + CORP_USER_ENTITY_NAME, + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java new file mode 100644 index 00000000000000..b4097d9dd045df --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/OwnerUtilsTest.java @@ -0,0 +1,110 @@ +package com.linkedin.datahub.graphql.utils; + +import static org.testng.AssertJUnit.*; + +import com.linkedin.common.Owner; +import com.linkedin.common.OwnershipType; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; +import java.net.URISyntaxException; +import org.testng.annotations.Test; + +public class OwnerUtilsTest { + + public static String TECHNICAL_OWNER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__technical_owner"; + public static String BUSINESS_OWNER_OWNERSHIP_TYPE_URN = + "urn:li:ownershipType:__system__business_owner"; + + @Test + public void testMapOwnershipType() { + assertEquals( + OwnerUtils.mapOwnershipTypeToEntity("TECHNICAL_OWNER"), TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + } + + @Test + public void testIsOwnerEqualUrnOnly() throws URISyntaxException { + Urn ownerUrn1 = new 
Urn("urn:li:corpuser:foo"); + Owner owner1 = new Owner(); + owner1.setOwner(ownerUrn1); + assertTrue(OwnerUtils.isOwnerEqual(owner1, ownerUrn1, null)); + + Urn ownerUrn2 = new Urn("urn:li:corpuser:bar"); + assertFalse(OwnerUtils.isOwnerEqual(owner1, ownerUrn2, null)); + } + + @Test + public void testIsOwnerEqualWithLegacyTypeOnly() throws URISyntaxException { + + Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + Owner ownerWithTechnicalOwnership = new Owner(); + ownerWithTechnicalOwnership.setOwner(ownerUrn1); + ownerWithTechnicalOwnership.setType(OwnershipType.TECHNICAL_OWNER); + + assertTrue( + OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + + Owner ownerWithBusinessOwnership = new Owner(); + ownerWithBusinessOwnership.setOwner(ownerUrn1); + ownerWithBusinessOwnership.setType(OwnershipType.BUSINESS_OWNER); + assertFalse( + OwnerUtils.isOwnerEqual( + ownerWithBusinessOwnership, ownerUrn1, new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN))); + } + + @Test + public void testIsOwnerEqualOnlyOwnershipTypeUrn() throws URISyntaxException { + + Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + Urn businessOwnershipTypeUrn = new Urn(BUSINESS_OWNER_OWNERSHIP_TYPE_URN); + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + + Owner ownerWithTechnicalOwnership = new Owner(); + ownerWithTechnicalOwnership.setOwner(ownerUrn1); + ownerWithTechnicalOwnership.setTypeUrn(technicalOwnershipTypeUrn); + + Owner ownerWithBusinessOwnership = new Owner(); + ownerWithBusinessOwnership.setOwner(ownerUrn1); + ownerWithBusinessOwnership.setTypeUrn(businessOwnershipTypeUrn); + + Owner ownerWithoutOwnershipType = new Owner(); + ownerWithoutOwnershipType.setOwner(ownerUrn1); + ownerWithoutOwnershipType.setType(OwnershipType.NONE); + + assertTrue( + OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + 
assertFalse( + OwnerUtils.isOwnerEqual(ownerWithBusinessOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse(OwnerUtils.isOwnerEqual(ownerWithTechnicalOwnership, ownerUrn1, null)); + assertTrue(OwnerUtils.isOwnerEqual(ownerWithoutOwnershipType, ownerUrn1, null)); + } + + public void testIsOwnerEqualWithBothLegacyAndNewType() throws URISyntaxException { + Urn technicalOwnershipTypeUrn = new Urn(TECHNICAL_OWNER_OWNERSHIP_TYPE_URN); + Urn businessOwnershipTypeUrn = new Urn(BUSINESS_OWNER_OWNERSHIP_TYPE_URN); + Urn ownerUrn1 = new Urn("urn:li:corpuser:foo"); + + Owner ownerWithLegacyTechnicalOwnership = new Owner(); + ownerWithLegacyTechnicalOwnership.setOwner(ownerUrn1); + ownerWithLegacyTechnicalOwnership.setType(OwnershipType.TECHNICAL_OWNER); + + assertTrue( + OwnerUtils.isOwnerEqual( + ownerWithLegacyTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse( + OwnerUtils.isOwnerEqual( + ownerWithLegacyTechnicalOwnership, ownerUrn1, businessOwnershipTypeUrn)); + + Owner ownerWithNewTechnicalOwnership = new Owner(); + ownerWithLegacyTechnicalOwnership.setOwner(ownerUrn1); + ownerWithLegacyTechnicalOwnership.setTypeUrn(technicalOwnershipTypeUrn); + + assertTrue( + OwnerUtils.isOwnerEqual( + ownerWithNewTechnicalOwnership, ownerUrn1, technicalOwnershipTypeUrn)); + assertFalse( + OwnerUtils.isOwnerEqual( + ownerWithNewTechnicalOwnership, ownerUrn1, businessOwnershipTypeUrn)); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java index adbc6808b5ab9e..005b47df56982e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static 
com.linkedin.metadata.Constants.DEFAULT_RUN_ID; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.types.common.mappers.util.RunInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.entity.EnvelopedAspect; @@ -8,10 +11,6 @@ import java.util.List; import org.testng.annotations.Test; -import static org.testng.Assert.*; - -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtilsTest { private final Long recentLastObserved = 1660056070640L; @@ -21,15 +20,21 @@ public class SystemMetadataUtilsTest { @Test public void testGetLastIngestedTime() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertEquals(lastObserved, mediumLastObserved); @@ -38,15 +43,21 @@ public void testGetLastIngestedTime() { @Test public void testGetLastIngestedRunId() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - 
aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); String lastRunId = SystemMetadataUtils.getLastIngestedRunId(aspectMap); assertEquals(lastRunId, "real-id-1"); @@ -55,15 +66,21 @@ public void testGetLastIngestedRunId() { @Test public void testGetLastIngestedRuns() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new 
SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); List runs = SystemMetadataUtils.getLastIngestionRuns(aspectMap); @@ -75,15 +92,23 @@ public void testGetLastIngestedRuns() { @Test public void testGetLastIngestedTimeAllDefaultRunIds() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("default-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved) - )); - aspectMap.put("default-run-id3", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "default-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved))); + aspectMap.put( + "default-run-id3", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata() + .setRunId(DEFAULT_RUN_ID) + .setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertNull(lastObserved, null); diff --git a/datahub-graphql-core/src/test/resources/test-entity-registry.yaml b/datahub-graphql-core/src/test/resources/test-entity-registry.yaml index efd75a7fb07f51..4df822377ddf2b 100644 --- a/datahub-graphql-core/src/test/resources/test-entity-registry.yaml +++ b/datahub-graphql-core/src/test/resources/test-entity-registry.yaml @@ -293,6 +293,14 @@ entities: aspects: - 
ownershipTypeInfo - status +- name: businessAttribute + category: core + keyAspect: businessAttributeKey + aspects: + - businessAttributeInfo + - status + - ownership + - institutionalMemory - name: dataContract category: core keyAspect: dataContractKey @@ -300,4 +308,9 @@ entities: - dataContractProperties - dataContractStatus - status +- name: schemaField + category: core + keyAspect: schemaFieldKey + aspects: + - businessAttributes events: diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 5d0edf3ee8427c..f64886953fe225 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -14,7 +14,7 @@ ext { dependencies { implementation project(':metadata-io') implementation project(':metadata-service:factories') - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-service:configuration') implementation project(':metadata-dao-impl:kafka-producer') implementation externalDependency.charle @@ -24,7 +24,7 @@ dependencies { exclude group: 'net.minidev', module: 'json-smart' exclude group: 'com.nimbusds', module: 'nimbus-jose-jwt' exclude group: "org.apache.htrace", module: "htrace-core4" - exclude group: "org.eclipse.jetty", module: "jetty-util" + exclude group: "org.eclipse.jetty" exclude group: "org.apache.hadoop.thirdparty", module: "hadoop-shaded-protobuf_3_7" exclude group: "com.charleskorn.kaml", module:"kaml" @@ -43,13 +43,19 @@ dependencies { implementation(externalDependency.jettison) { because("previous versions are vulnerable") } + implementation(externalDependency.guava) { + because("CVE-2023-2976") + } + implementation('io.airlift:aircompressor:0.27') { + because("CVE-2024-36114") + } } // mock internal schema registry implementation externalDependency.kafkaAvroSerde implementation externalDependency.kafkaAvroSerializer - implementation "org.apache.kafka:kafka_2.12:$kafkaVersion" + implementation 
"org.apache.kafka:kafka_2.12:3.7.1" implementation externalDependency.slf4jApi compileOnly externalDependency.lombok @@ -75,6 +81,12 @@ dependencies { testImplementation externalDependency.mockito testImplementation externalDependency.testng testRuntimeOnly externalDependency.logbackClassic + + constraints { + implementation(implementation externalDependency.parquetHadoop) { + because("CVE-2022-42003") + } + } } bootJar { @@ -82,16 +94,38 @@ bootJar { archiveFileName = "${project.name}.jar" } +bootRun { + environment "ENTITY_REGISTRY_CONFIG_PATH", "../metadata-models/src/main/resources/entity-registry.yml" + environment "ENABLE_STRUCTURED_PROPERTIES_SYSTEM_UPDATE", "true" + environment "ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX", "true" + environment "SERVER_PORT", "8083" + args += ["-u", "SystemUpdate"] +} + +/** + * Runs SystemUpdate on locally running system + */ +task run(type: Exec) { + dependsOn bootJar + group = "Execution" + description = "Run the datahub-upgrade SystemUpdate process locally." 
+ environment "ENTITY_REGISTRY_CONFIG_PATH", "../metadata-models/src/main/resources/entity-registry.yml" + environment "ENABLE_STRUCTURED_PROPERTIES_SYSTEM_UPDATE", "true" + environment "ELASTICSEARCH_INDEX_BUILDER_MAPPINGS_REINDEX", "true" + commandLine "java", "-jar", "-Dserver.port=8083", bootJar.getArchiveFile().get(), "-u", "SystemUpdate" +} + docker { name "${docker_registry}/${docker_repo}:v${version}" version "v${version}" dockerfile file("${rootProject.projectDir}/docker/${docker_repo}/Dockerfile") files bootJar.outputs.files files fileTree(rootProject.projectDir) { + include '.dockerignore' include "docker/${docker_repo}/*" include 'metadata-models/src/main/resources/*' }.exclude { - i -> i.file.isHidden() || i.file == buildDir + i -> (!i.file.name.endsWith(".dockerignore") && i.file.isHidden()) } tag("Debug", "${docker_registry}/${docker_repo}:debug") @@ -99,6 +133,22 @@ docker { buildx(true) load(true) push(false) + + // Add build args if they are defined (needed for some CI or enterprise environments) + def dockerBuildArgs = [:] + if (project.hasProperty('alpineApkRepositoryUrl')) { + dockerBuildArgs.ALPINE_REPO_URL = project.getProperty('alpineApkRepositoryUrl') + } + if (project.hasProperty('githubMirrorUrl')) { + dockerBuildArgs.GITHUB_REPO_URL = project.getProperty('githubMirrorUrl') + } + if (project.hasProperty('mavenCentralRepositoryUrl')) { + dockerBuildArgs.MAVEN_CENTRAL_REPO_URL = project.getProperty('mavenCentralRepositoryUrl') + } + + if (dockerBuildArgs.size() > 0) { + buildArgs(dockerBuildArgs) + } } tasks.getByPath(":datahub-upgrade:docker").dependsOn([bootJar]) diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java index c42e1bb7f92e05..d3aea2a3dac12a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java @@ -1,30 +1,19 @@ 
package com.linkedin.datahub.upgrade; import com.google.common.collect.ImmutableList; - import java.util.List; - -/** - * Specification of an upgrade to be performed to the DataHub platform. - */ +/** Specification of an upgrade to be performed to the DataHub platform. */ public interface Upgrade { - /** - * String identifier for the upgrade. - */ + /** String identifier for the upgrade. */ String id(); - /** - * Returns a set of steps to perform during the upgrade. - */ + /** Returns a set of steps to perform during the upgrade. */ List steps(); - /** - * Returns a set of steps to perform on upgrade success, failure, or abort. - */ + /** Returns a set of steps to perform on upgrade success, failure, or abort. */ default List cleanupSteps() { return ImmutableList.of(); } - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java index bf356c60a21a4a..6da656020edf84 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java @@ -2,21 +2,15 @@ import java.util.function.BiConsumer; - /** * Step executed on finish of an {@link Upgrade}. * - * Note that this step is not retried, even in case of failures. + *

Note that this step is not retried, even in case of failures. */ public interface UpgradeCleanupStep { - /** - * Returns an identifier for the upgrade step. - */ + /** Returns an identifier for the upgrade step. */ String id(); - /** - * Returns a function representing the cleanup step's logic. - */ + /** Returns a function representing the cleanup step's logic. */ BiConsumer executable(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java index e6be6905acceea..6d10a7ed6b3b48 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java @@ -1,23 +1,24 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.datahub.upgrade.removeunknownaspects.RemoveUnknownAspects; import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.SystemUpdate; +import com.linkedin.datahub.upgrade.system.SystemUpdateBlocking; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import javax.inject.Inject; import javax.inject.Named; import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.CommandLineRunner; import org.springframework.stereotype.Component; import picocli.CommandLine; - @Slf4j 
@Component public class UpgradeCli implements CommandLineRunner { @@ -52,18 +53,22 @@ private static final class Args { @Named("removeUnknownAspects") private RemoveUnknownAspects removeUnknownAspects; - @Inject - @Named("buildIndices") - private BuildIndices buildIndices; - - @Inject - @Named("cleanIndices") - private CleanIndices cleanIndices; - - @Inject + @Autowired(required = false) @Named("systemUpdate") private SystemUpdate systemUpdate; + @Autowired(required = false) + @Named("systemUpdateBlocking") + private SystemUpdateBlocking systemUpdateBlocking; + + @Autowired(required = false) + @Named("systemUpdateNonBlocking") + private SystemUpdateNonBlocking systemUpdateNonBlocking; + + @Autowired + @Named("systemOperationContext") + private OperationContext systemOperationContext; + @Override public void run(String... cmdLineArgs) { _upgradeManager.register(noCodeUpgrade); @@ -71,13 +76,20 @@ public void run(String... cmdLineArgs) { _upgradeManager.register(restoreIndices); _upgradeManager.register(restoreBackup); _upgradeManager.register(removeUnknownAspects); - _upgradeManager.register(buildIndices); - _upgradeManager.register(cleanIndices); - _upgradeManager.register(systemUpdate); + if (systemUpdate != null) { + _upgradeManager.register(systemUpdate); + } + if (systemUpdateBlocking != null) { + _upgradeManager.register(systemUpdateBlocking); + } + if (systemUpdateNonBlocking != null) { + _upgradeManager.register(systemUpdateNonBlocking); + } final Args args = new Args(); new CommandLine(args).setCaseInsensitiveEnumValuesAllowed(true).parseArgs(cmdLineArgs); - UpgradeResult result = _upgradeManager.execute(args.upgradeId.trim(), args.args); + UpgradeResult result = + _upgradeManager.execute(systemOperationContext, args.upgradeId.trim(), args.args); if (UpgradeResult.Result.FAILED.equals(result.result())) { System.exit(1); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index 53a5c0758f3189..e17ac6be79face 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.upgrade; +import com.linkedin.gms.factory.auth.AuthorizerChainFactory; +import com.linkedin.gms.factory.auth.DataHubAuthorizerFactory; +import com.linkedin.gms.factory.graphql.GraphQLEngineFactory; +import com.linkedin.gms.factory.kafka.KafkaEventConsumerFactory; +import com.linkedin.gms.factory.kafka.SimpleKafkaConsumerFactory; import com.linkedin.gms.factory.telemetry.ScheduledAnalyticsFactory; import org.springframework.boot.WebApplicationType; import org.springframework.boot.autoconfigure.SpringBootApplication; @@ -8,18 +13,30 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; - @SuppressWarnings("checkstyle:HideUtilityClassConstructor") @SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class}) -@ComponentScan(basePackages = { - "com.linkedin.gms.factory", - "com.linkedin.datahub.upgrade.config", - "com.linkedin.metadata.dao.producer" -}, excludeFilters = { - @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class) -}) +@ComponentScan( + basePackages = { + "com.linkedin.gms.factory", + "com.linkedin.datahub.upgrade.config", + "com.linkedin.metadata.dao.producer" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = { + ScheduledAnalyticsFactory.class, + AuthorizerChainFactory.class, + DataHubAuthorizerFactory.class, + SimpleKafkaConsumerFactory.class, + KafkaEventConsumerFactory.class, + GraphQLEngineFactory.class + }) + }) public class UpgradeCliApplication { public static void main(String[] args) { - new 
SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class).web(WebApplicationType.NONE).run(args); + new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class) + .web(WebApplicationType.NONE) + .run(args); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java index 76cfc6321adfdb..73643175ab9c67 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java @@ -1,38 +1,28 @@ package com.linkedin.datahub.upgrade; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.Map; import java.util.Optional; - -/** - * Context about a currently running upgrade. - */ +/** Context about a currently running upgrade. */ public interface UpgradeContext { - /** - * Returns the currently running upgrade. - */ + /** Returns the currently running upgrade. */ Upgrade upgrade(); - /** - * Returns the results from steps that have been completed. - */ + /** Returns the results from steps that have been completed. */ List stepResults(); - /** - * Returns a report object where human-readable messages can be logged. - */ + /** Returns a report object where human-readable messages can be logged. */ UpgradeReport report(); - /** - * Returns a list of raw arguments that have been provided as input to the upgrade. - */ + /** Returns a list of raw arguments that have been provided as input to the upgrade. */ List args(); - /** - * Returns a map of argument to <>optional value, as delimited by an '=' character. - */ + /** Returns a map of argument to <>optional value, as delimited by an '=' character. 
*/ Map> parsedArgs(); + /** Returns the operation context ffor the upgrade */ + OperationContext opContext(); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java index 927ccc05783084..14f36e60d75b2d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java @@ -1,21 +1,16 @@ package com.linkedin.datahub.upgrade; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; +import javax.annotation.Nonnull; - -/** - * Responsible for managing the execution of an {@link Upgrade}. - */ +/** Responsible for managing the execution of an {@link Upgrade}. */ public interface UpgradeManager { - /** - * Register an {@link Upgrade} with the manaager. - */ + /** Register an {@link Upgrade} with the manaager. */ void register(Upgrade upgrade); - /** - * Kick off an {@link Upgrade} by identifier. - */ - UpgradeResult execute(String upgradeId, List args); - + /** Kick off an {@link Upgrade} by identifier. */ + UpgradeResult execute( + @Nonnull OperationContext systemOpContext, String upgradeId, List args); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java index 2ed3f105a4edaa..1c677f6fe8578f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java @@ -2,25 +2,15 @@ import java.util.List; - -/** - * A human-readable record of upgrade progress + status. - */ +/** A human-readable record of upgrade progress + status. */ public interface UpgradeReport { - /** - * Adds a new line to the upgrade report. - */ + /** Adds a new line to the upgrade report. 
*/ void addLine(String line); - /** - * Adds a new line to the upgrade report with exception - */ + /** Adds a new line to the upgrade report with exception */ void addLine(String line, Exception e); - /** - * Retrieves the lines in the report. - */ + /** Retrieves the lines in the report. */ List lines(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java index cdb94f0c0bba1a..25dc758575fd16 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java @@ -1,36 +1,21 @@ package com.linkedin.datahub.upgrade; -/** - * Represents the result of executing an {@link Upgrade} - */ +/** Represents the result of executing an {@link Upgrade} */ public interface UpgradeResult { - /** - * The execution result. - */ + /** The execution result. */ enum Result { - /** - * Upgrade succeeded. - */ + /** Upgrade succeeded. */ SUCCEEDED, - /** - * Upgrade failed. - */ + /** Upgrade failed. */ FAILED, - /** - * Upgrade was aborted. - */ + /** Upgrade was aborted. */ ABORTED } - /** - * Returns the {@link Result} of executing an {@link Upgrade} - */ + /** Returns the {@link Result} of executing an {@link Upgrade} */ Result result(); - /** - * Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. - */ + /** Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. 
*/ UpgradeReport report(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java index b85bd7a51e3dd5..3f90dcb33a0052 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java @@ -2,39 +2,29 @@ import java.util.function.Function; - -/** - * Represents a single executable step in an {@link Upgrade}. - */ +/** Represents a single executable step in an {@link Upgrade}. */ public interface UpgradeStep { - /** - * Returns an identifier for the upgrade step. - */ + /** Returns an identifier for the upgrade step. */ String id(); - /** - * Returns a function representing the step's execution logic. - */ + /** Returns a function representing the step's execution logic. */ Function executable(); - /** - * Returns the number of times the step should be retried. - */ + /** Returns the number of times the step should be retried. */ default int retryCount() { return 0; } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. 
*/ default boolean isOptional() { return false; } - /** - * Returns whether or not to skip the step based on the UpgradeContext - */ + /** Returns whether or not to skip the step based on the UpgradeContext */ default boolean skip(UpgradeContext context) { return false; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java index 60d51f9ba476cc..04b3d4b8559e67 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java @@ -2,52 +2,33 @@ public interface UpgradeStepResult { - /** - * Returns a string identifier associated with the step. - */ + /** Returns a string identifier associated with the step. */ String stepId(); - /** - * The outcome of the step execution. - */ + /** The outcome of the step execution. */ enum Result { - /** - * The step succeeded. - */ + /** The step succeeded. */ SUCCEEDED, - /** - * The step failed. - */ + /** The step failed. */ FAILED } - /** - * A control-flow action to perform as a result of the step execution. - */ + /** A control-flow action to perform as a result of the step execution. */ enum Action { - /** - * Continue attempting the upgrade. - */ + /** Continue attempting the upgrade. */ CONTINUE, - /** - * Immediately fail the upgrade, without retry. - */ + /** Immediately fail the upgrade, without retry. */ FAIL, - /** - * Immediately abort the upgrade, without retry. - */ + /** Immediately abort the upgrade, without retry. */ ABORT } - /** - * Returns the result of executing the step, either success or failure. - */ + /** Returns the result of executing the step, either success or failure. */ Result result(); - /** - * Returns the action to perform after executing the step, either continue or abort. 
- */ + /** Returns the action to perform after executing the step, either continue or abort. */ default Action action() { return Action.CONTINUE; - }; + } + ; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java index a6f3ef55604424..8d5f1118433fc0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java @@ -19,10 +19,12 @@ public static Map> parseArgs(final List args) { for (final String arg : args) { List parsedArg = Arrays.asList(arg.split(KEY_VALUE_DELIMITER, 2)); - parsedArgs.put(parsedArg.get(0), parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty()); + parsedArgs.put( + parsedArg.get(0), + parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty()); } return parsedArgs; } - private UpgradeUtils() { } + private UpgradeUtils() {} } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java index 4f980b11b888a5..393b5411599adc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java @@ -8,7 +8,6 @@ import com.linkedin.metadata.graph.GraphService; import java.util.function.Function; - public class ClearGraphServiceStep implements UpgradeStep { private final String deletePattern = ".*"; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java index fca8f60aefd95b..44592ecf92dbda 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java @@ -8,13 +8,13 @@ import com.linkedin.metadata.search.EntitySearchService; import java.util.function.Function; - public class ClearSearchServiceStep implements UpgradeStep { private final EntitySearchService _entitySearchService; private final boolean _alwaysRun; - public ClearSearchServiceStep(final EntitySearchService entitySearchService, final boolean alwaysRun) { + public ClearSearchServiceStep( + final EntitySearchService entitySearchService, final boolean alwaysRun) { _entitySearchService = entitySearchService; _alwaysRun = alwaysRun; } @@ -45,7 +45,7 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entitySearchService.clear(); + _entitySearchService.clear(context.opContext()); } catch (Exception e) { context.report().addLine("Failed to clear search service", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSystemMetadataServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSystemMetadataServiceStep.java new file mode 100644 index 00000000000000..889d2f0a79edf6 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSystemMetadataServiceStep.java @@ -0,0 +1,56 @@ +package com.linkedin.datahub.upgrade.common.steps; + +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; +import java.util.function.Function; + +public 
class ClearSystemMetadataServiceStep implements UpgradeStep { + + private final SystemMetadataService _systemMetadataService; + private final boolean _alwaysRun; + + public ClearSystemMetadataServiceStep( + final SystemMetadataService systemMetadataService, final boolean alwaysRun) { + _systemMetadataService = systemMetadataService; + _alwaysRun = alwaysRun; + } + + @Override + public String id() { + return "ClearSystemMetadataServiceStep"; + } + + @Override + public boolean skip(UpgradeContext context) { + if (_alwaysRun) { + return false; + } + if (context.parsedArgs().containsKey(NoCodeUpgrade.CLEAN_ARG_NAME)) { + return false; + } + context.report().addLine("Cleanup has not been requested."); + return true; + } + + @Override + public int retryCount() { + return 1; + } + + @Override + public Function executable() { + return (context) -> { + try { + _systemMetadataService.clear(); + } catch (Exception e) { + context.report().addLine("Failed to clear system metadata service", e); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index 270aa11c7b0701..a80adabc60e743 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -4,15 +4,16 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import java.util.function.Function; import 
lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; - +@Slf4j @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient systemEntityClient; @Override public String id() { @@ -28,9 +29,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - _entityClient.setWritable(false); + systemEntityClient.setWritable(context.opContext(), false); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode off in GMS", e); context.report().addLine("Failed to turn write mode off in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 8df02123983e8b..a6b2942fabffd7 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -4,14 +4,15 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import java.util.function.Function; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; - +@Slf4j @RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { - private final SystemRestliEntityClient _entityClient; + private final SystemEntityClient systemEntityClient; @Override public String id() { @@ -27,9 +28,9 @@ public int retryCount() { public Function executable() { return (context) -> { try { - 
_entityClient.setWritable(true); + systemEntityClient.setWritable(context.opContext(), true); } catch (Exception e) { - e.printStackTrace(); + log.error("Failed to turn write mode back on in GMS", e); context.report().addLine("Failed to turn write mode back on in GMS"); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java index 1391ef685c335d..4e7447cb1e2cb6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.common.steps; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -18,9 +20,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class GMSQualificationStep implements UpgradeStep { @@ -70,9 +69,16 @@ private boolean isEligible(ObjectNode configJson) { @Override public Function executable() { return (context) -> { - String gmsHost = System.getenv("DATAHUB_GMS_HOST") == null ? "localhost" : System.getenv("DATAHUB_GMS_HOST"); - String gmsPort = System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); - String gmsProtocol = System.getenv("DATAHUB_GMS_PROTOCOL") == null ? "http" : System.getenv("DATAHUB_GMS_PROTOCOL"); + String gmsHost = + System.getenv("DATAHUB_GMS_HOST") == null + ? "localhost" + : System.getenv("DATAHUB_GMS_HOST"); + String gmsPort = + System.getenv("DATAHUB_GMS_PORT") == null ? 
"8080" : System.getenv("DATAHUB_GMS_PORT"); + String gmsProtocol = + System.getenv("DATAHUB_GMS_PROTOCOL") == null + ? "http" + : System.getenv("DATAHUB_GMS_PROTOCOL"); try { String spec = String.format("%s://%s:%s/config", gmsProtocol, gmsHost, gmsPort); @@ -81,33 +87,37 @@ public Function executable() { String responseString = convertStreamToString(response); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, - MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode configJson = mapper.readTree(responseString); if (isEligible((ObjectNode) configJson)) { - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } else { - context.report().addLine(String.format("Failed to qualify GMS. It is not running on the latest version." - + "Re-run GMS on the latest datahub release")); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "Failed to qualify GMS. It is not running on the latest version." + + "Re-run GMS on the latest datahub release")); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } catch (Exception e) { e.printStackTrace(); - context.report().addLine(String.format("ERROR: Cannot connect to GMS" - + "at %s://host %s port %s. 
Make sure GMS is on the latest version " - + "and is running at that host before starting the migration.", - gmsProtocol, - gmsHost, - gmsPort)); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "ERROR: Cannot connect to GMS" + + "at %s://host %s port %s. Make sure GMS is on the latest version " + + "and is running at that host before starting the migration.", + gmsProtocol, gmsHost, gmsPort)); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index 16e5e4247267f1..a33722d7761cc4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -1,17 +1,28 @@ package com.linkedin.datahub.upgrade.config; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.browsepaths.BackfillBrowsePathsV2; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; - @Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class BackfillBrowsePathsV2Config { @Bean - public BackfillBrowsePathsV2 backfillBrowsePathsV2(EntityService entityService, SearchService searchService) { - return new 
BackfillBrowsePathsV2(entityService, searchService); + public NonBlockingSystemUpgrade backfillBrowsePathsV2( + final OperationContext opContext, + EntityService entityService, + SearchService searchService, + @Value("${systemUpdate.browsePathsV2.enabled}") final boolean enabled, + @Value("${systemUpdate.browsePathsV2.reprocess.enabled}") final boolean reprocessEnabled, + @Value("${systemUpdate.browsePathsV2.batchSize}") final Integer batchSize) { + return new BackfillBrowsePathsV2( + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java new file mode 100644 index 00000000000000..7226ec267dbbc5 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillPolicyFieldsConfig.java @@ -0,0 +1,27 @@ +package com.linkedin.datahub.upgrade.config; + +import com.linkedin.datahub.upgrade.system.policyfields.BackfillPolicyFields; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +@Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) +public class BackfillPolicyFieldsConfig { + + @Bean + public BackfillPolicyFields backfillPolicyFields( + final OperationContext opContext, + EntityService entityService, + SearchService searchService, + @Value("${systemUpdate.policyFields.enabled}") final boolean enabled, + @Value("${systemUpdate.policyFields.reprocess.enabled}") final boolean reprocessEnabled, + 
@Value("${systemUpdate.policyFields.batchSize}") final Integer batchSize) { + return new BackfillPolicyFields( + opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index e98f0dc2093f62..e0de8a7255d61e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -1,25 +1,39 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; - @Configuration +@Conditional(SystemUpdateCondition.BlockingSystemUpdateCondition.class) public class BuildIndicesConfig { @Bean(name = "buildIndices") - public BuildIndices buildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public BlockingSystemUpgrade 
buildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao) { - return new BuildIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new BuildIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider, + aspectDao); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java index 558c9780911ac5..7559aaf3f3cdbc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java @@ -1,25 +1,39 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; - @Configuration 
+@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) public class CleanIndicesConfig { - @Bean(name = "cleanIndices") - public CleanIndices cleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + @Bean + public NonBlockingSystemUpgrade cleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao) { - return new CleanIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new CleanIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider, + aspectDao); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 23ea81009fa1da..5ba5c8a90fd4ac 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -1,28 +1,35 @@ package com.linkedin.datahub.upgrade.config; +import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; + import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import 
com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.opensearch.client.RestHighLevelClient; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; - - +@Slf4j @Configuration public class NoCodeCleanupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeCleanup") - @DependsOn({"ebeanServer", "graphService", "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN}) + @DependsOn({ + "ebeanServer", + "graphService", + "elasticSearchRestHighLevelClient", + INDEX_CONVENTION_BEAN + }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -31,4 +38,12 @@ public NoCodeCleanupUpgrade createInstance() { final IndexConvention indexConvention = applicationContext.getBean(IndexConvention.class); return new NoCodeCleanupUpgrade(ebeanServer, graphClient, searchClient, indexConvention); } + + @Bean(name = "noCodeCleanup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeCleanupUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeCleanupUpgrade(null, null, null, null); + } } diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index cd264e529e9a57..741aeece1cf62f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -1,33 +1,43 @@ package com.linkedin.datahub.upgrade.config; import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - +@Slf4j @Configuration public class NoCodeUpgradeConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) + @DependsOn({"ebeanServer", "entityService", "systemEntityClient", "entityRegistry"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = 
applicationContext.getBean(SystemRestliEntityClient.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final SystemEntityClient entityClient = applicationContext.getBean(SystemEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); } + + @Bean(name = "noCodeUpgrade") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java new file mode 100644 index 00000000000000..4956254062ff96 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/ReindexDataJobViaNodesCLLConfig.java @@ -0,0 +1,29 @@ +package com.linkedin.datahub.upgrade.config; + +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.vianodes.ReindexDataJobViaNodesCLL; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.entity.EntityService; +import io.datahubproject.metadata.context.OperationContext; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; +import org.springframework.context.annotation.Configuration; + +@Configuration +@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) +public class ReindexDataJobViaNodesCLLConfig { + + @Bean + public NonBlockingSystemUpgrade reindexDataJobViaNodesCLL( + final OperationContext opContext, + final EntityService 
entityService, + final AspectDao aspectDao, + @Value("${systemUpdate.dataJobNodeCLL.enabled}") final boolean enabled, + @Value("${systemUpdate.dataJobNodeCLL.batchSize}") final Integer batchSize, + @Value("${systemUpdate.dataJobNodeCLL.delayMs}") final Integer delayMs, + @Value("${systemUpdate.dataJobNodeCLL.limit}") final Integer limit) { + return new ReindexDataJobViaNodesCLL( + opContext, entityService, aspectDao, enabled, batchSize, delayMs, limit); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index cdc739efc416dd..5bf1241e21305b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -5,11 +5,10 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") - public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) { + public RemoveUnknownAspects removeUnknownAspects(EntityService entityService) { return new RemoveUnknownAspects(entityService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 97a08800534de8..b4eafb4ad3d241 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -1,38 +1,66 @@ package com.linkedin.datahub.upgrade.config; import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; -import com.linkedin.entity.client.SystemRestliEntityClient; 
+import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - +@Slf4j @Configuration public class RestoreBackupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreBackup") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "graphService", - "searchService", "entityRegistry"}) + @DependsOn({ + "ebeanServer", + "entityService", + "systemEntityClient", + "systemMetadataService", + "searchService", + "graphService", + "entityRegistry" + }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); - final GraphService graphClient = applicationContext.getBean(GraphService.class); - final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final 
SystemEntityClient systemEntityClient = + applicationContext.getBean(SystemEntityClient.class); + final SystemMetadataService systemMetadataService = + applicationContext.getBean(SystemMetadataService.class); + final EntitySearchService entitySearchService = + applicationContext.getBean(EntitySearchService.class); + final GraphService graphService = applicationContext.getBean(GraphService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreBackup(ebeanServer, entityService, entityRegistry, entityClient, - graphClient, searchClient); + return new RestoreBackup( + ebeanServer, + entityService, + entityRegistry, + systemEntityClient, + systemMetadataService, + entitySearchService, + graphService); + } + + @Bean(name = "restoreBackup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreBackup createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreBackup(null, null, null, null, null, null, null); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 663cad4a4bff63..949b75edaa6ba0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -3,33 +3,51 @@ import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - +@Slf4j @Configuration public class RestoreIndicesConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") - @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) + @DependsOn({ + "ebeanServer", + "entityService", + "systemMetadataService", + "searchService", + "graphService" + }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); - final EntityService entityService = applicationContext.getBean(EntityService.class); - final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); + final EntityService entityService = applicationContext.getBean(EntityService.class); + final SystemMetadataService systemMetadataService = + applicationContext.getBean(SystemMetadataService.class); + final EntitySearchService entitySearchService = + applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); - final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices(ebeanServer, entityService, entityRegistry, entitySearchService, - graphService); + return new RestoreIndices( + ebeanServer, entityService, systemMetadataService, entitySearchService, graphService); + } + + @Bean(name = "restoreIndices") + @ConditionalOnProperty(name = "entityService.impl", 
havingValue = "cassandra") + @Nonnull + public RestoreIndices createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreIndices(null, null, null, null, null); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java new file mode 100644 index 00000000000000..0d65af742a5925 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateCondition.java @@ -0,0 +1,48 @@ +package com.linkedin.datahub.upgrade.config; + +import java.util.Objects; +import java.util.Set; +import org.springframework.boot.ApplicationArguments; +import org.springframework.context.annotation.Condition; +import org.springframework.context.annotation.ConditionContext; +import org.springframework.core.type.AnnotatedTypeMetadata; + +public class SystemUpdateCondition implements Condition { + public static final String SYSTEM_UPDATE_ARG = "SystemUpdate"; + public static final String BLOCKING_SYSTEM_UPDATE_ARG = SYSTEM_UPDATE_ARG + "Blocking"; + public static final String NONBLOCKING_SYSTEM_UPDATE_ARG = SYSTEM_UPDATE_ARG + "NonBlocking"; + public static final Set SYSTEM_UPDATE_ARGS = + Set.of(SYSTEM_UPDATE_ARG, BLOCKING_SYSTEM_UPDATE_ARG, NONBLOCKING_SYSTEM_UPDATE_ARG); + + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context.getBeanFactory().getBean(ApplicationArguments.class).getNonOptionArgs().stream() + .filter(Objects::nonNull) + .anyMatch(SYSTEM_UPDATE_ARGS::contains); + } + + public static class BlockingSystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context + .getBeanFactory() + .getBean(ApplicationArguments.class) + .getNonOptionArgs() + .stream() + .anyMatch(arg -> SYSTEM_UPDATE_ARG.equals(arg) || 
BLOCKING_SYSTEM_UPDATE_ARG.equals(arg)); + } + } + + public static class NonBlockingSystemUpdateCondition implements Condition { + @Override + public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) { + return context + .getBeanFactory() + .getBean(ApplicationArguments.class) + .getNonOptionArgs() + .stream() + .anyMatch( + arg -> SYSTEM_UPDATE_ARG.equals(arg) || NONBLOCKING_SYSTEM_UPDATE_ARG.equals(arg)); + } + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 9848fc7a0008f7..cac9b5f9483d41 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.upgrade.config; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; -import com.linkedin.gms.factory.common.TopicConventionFactory; +import com.linkedin.datahub.upgrade.system.SystemUpdateBlocking; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.kafka.DataHubKafkaProducerFactory; +import com.linkedin.gms.factory.kafka.common.TopicConventionFactory; +import com.linkedin.gms.factory.kafka.schemaregistry.InternalSchemaRegistryFactory; import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; 
import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.metadata.dao.producer.KafkaHealthChecker; import com.linkedin.metadata.version.GitVersion; import com.linkedin.mxe.TopicConvention; +import java.util.List; import lombok.extern.slf4j.Slf4j; import org.apache.avro.generic.IndexedRecord; import org.apache.kafka.clients.producer.KafkaProducer; @@ -20,22 +24,37 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.kafka.KafkaProperties; import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Conditional; import org.springframework.context.annotation.Configuration; - +import org.springframework.context.annotation.Primary; @Slf4j @Configuration +@Conditional(SystemUpdateCondition.class) public class SystemUpdateConfig { + @Bean(name = "systemUpdate") - public SystemUpdate systemUpdate(final BuildIndices buildIndices, final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + public SystemUpdate systemUpdate( + final List blockingSystemUpgrades, + final List nonBlockingSystemUpgrades, + final DataHubStartupStep dataHubStartupStep) { + return new SystemUpdate(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } - String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate(buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + @Bean(name = "systemUpdateBlocking") + public SystemUpdateBlocking 
systemUpdateBlocking( + final List blockingSystemUpgrades, + final DataHubStartupStep dataHubStartupStep) { + return new SystemUpdateBlocking(blockingSystemUpgrades, List.of(), dataHubStartupStep); + } + + @Bean(name = "systemUpdateNonBlocking") + public SystemUpdateNonBlocking systemUpdateNonBlocking( + final List nonBlockingSystemUpgrades) { + return new SystemUpdateNonBlocking(List.of(), nonBlockingSystemUpgrades, null); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -46,20 +65,59 @@ public String getRevision() { return revision; } + @Bean + public DataHubStartupStep dataHubStartupStep( + @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision) { + return new DataHubStartupStep( + kafkaEventProducer, String.format("%s-%s", gitVersion.getVersion(), revision)); + } + @Autowired @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; - @Autowired - private KafkaHealthChecker kafkaHealthChecker; + @Autowired private KafkaHealthChecker kafkaHealthChecker; @Bean(name = "duheKafkaEventProducer") - protected KafkaEventProducer duheKafkaEventProducer(@Qualifier("configurationProvider") ConfigurationProvider provider, - KafkaProperties properties, - @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { + protected KafkaEventProducer duheKafkaEventProducer( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties properties, + @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { KafkaConfiguration kafkaConfiguration = provider.getKafka(); - Producer producer = new KafkaProducer<>( - DataHubKafkaProducerFactory.buildProducerProperties(duheSchemaRegistryConfig, kafkaConfiguration, properties)); + Producer producer = + new KafkaProducer<>( + DataHubKafkaProducerFactory.buildProducerProperties( + duheSchemaRegistryConfig, 
kafkaConfiguration, properties)); return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker); } + + /** + * The ReindexDataJobViaNodesCLLConfig step requires publishing to MCL. Overriding the default + * producer with this special producer which doesn't require an active registry. + * + *

<p>Use when INTERNAL registry and is SYSTEM_UPDATE + * + *
<p>
This forces this producer into the EntityService + */ + @Primary + @Bean(name = "kafkaEventProducer") + @ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = InternalSchemaRegistryFactory.TYPE) + protected KafkaEventProducer kafkaEventProducer( + @Qualifier("duheKafkaEventProducer") KafkaEventProducer kafkaEventProducer) { + return kafkaEventProducer; + } + + @Primary + @Bean(name = "schemaRegistryConfig") + @ConditionalOnProperty( + name = "kafka.schemaRegistry.type", + havingValue = InternalSchemaRegistryFactory.TYPE) + protected SchemaRegistryConfig schemaRegistryConfig( + @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { + return duheSchemaRegistryConfig; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java index 972b55f2001f12..c4cfad53624764 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java @@ -5,53 +5,41 @@ import com.linkedin.datahub.upgrade.UpgradeReport; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeUtils; +import io.datahubproject.metadata.context.OperationContext; import java.util.List; import java.util.Map; import java.util.Optional; +import javax.annotation.Nonnull; +import lombok.Getter; +import lombok.experimental.Accessors; - +@Getter +@Accessors(fluent = true) public class DefaultUpgradeContext implements UpgradeContext { - private final Upgrade _upgrade; - private final UpgradeReport _report; - private final List _previousStepResults; - private final List _args; - private final Map> _parsedArgs; + private final OperationContext opContext; + private final Upgrade upgrade; + private final UpgradeReport report; + private final List 
previousStepResults; + private final List args; + private final Map> parsedArgs; DefaultUpgradeContext( + @Nonnull OperationContext opContext, Upgrade upgrade, UpgradeReport report, List previousStepResults, List args) { - _upgrade = upgrade; - _report = report; - _previousStepResults = previousStepResults; - _args = args; - _parsedArgs = UpgradeUtils.parseArgs(args); - } - - @Override - public Upgrade upgrade() { - return _upgrade; + this.opContext = opContext; + this.upgrade = upgrade; + this.report = report; + this.previousStepResults = previousStepResults; + this.args = args; + this.parsedArgs = UpgradeUtils.parseArgs(args); } @Override public List stepResults() { - return _previousStepResults; - } - - @Override - public UpgradeReport report() { - return _report; - } - - @Override - public List args() { - return _args; - } - - @Override - public Map> parsedArgs() { - return _parsedArgs; + return previousStepResults; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java index a642ee3fb0a90a..240ec9f7bb2fed 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java @@ -11,13 +11,15 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.metadata.utils.metrics.MetricUtils; +import io.datahubproject.metadata.context.OperationContext; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; - +@Slf4j public class DefaultUpgradeManager implements UpgradeManager { private final Map _upgrades = new HashMap<>(); @@ -28,20 +30,25 @@ public void register(@Nonnull Upgrade upgrade) { } @Override - public 
UpgradeResult execute(String upgradeId, List args) { + public UpgradeResult execute( + @Nonnull OperationContext systemOpContext, String upgradeId, List args) { if (_upgrades.containsKey(upgradeId)) { - return executeInternal(_upgrades.get(upgradeId), args); + return executeInternal(systemOpContext, _upgrades.get(upgradeId), args); } - throw new IllegalArgumentException(String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); + throw new IllegalArgumentException( + String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); } - private UpgradeResult executeInternal(Upgrade upgrade, List args) { + private UpgradeResult executeInternal( + @Nonnull OperationContext systemOpContext, Upgrade upgrade, List args) { final UpgradeReport upgradeReport = new DefaultUpgradeReport(); - final UpgradeContext context = new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); + final UpgradeContext context = + new DefaultUpgradeContext(systemOpContext, upgrade, upgradeReport, new ArrayList<>(), args); upgradeReport.addLine(String.format("Starting upgrade with id %s...", upgrade.id())); UpgradeResult result = executeInternal(context); upgradeReport.addLine( - String.format("Upgrade %s completed with result %s. Exiting...", upgrade.id(), result.result())); + String.format( + "Upgrade %s completed with result %s. 
Exiting...", upgrade.id(), result.result())); executeCleanupInternal(context, result); return result; } @@ -58,12 +65,16 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (step.skip(context)) { upgradeReport.addLine( - String.format(String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); continue; } upgradeReport.addLine( - String.format(String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); final UpgradeStepResult stepResult = executeStepInternal(context, step); stepResults.add(stepResult); @@ -71,7 +82,8 @@ private UpgradeResult executeInternal(UpgradeContext context) { // Apply Actions if (UpgradeStepResult.Action.ABORT.equals(stepResult.action())) { upgradeReport.addLine( - String.format("Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", + String.format( + "Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", step.id())); return new DefaultUpgradeResult(UpgradeResult.Result.ABORTED, upgradeReport); } @@ -80,23 +92,27 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (UpgradeStepResult.Result.FAILED.equals(stepResult.result())) { if (step.isOptional()) { upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", i + 1, - steps.size(), step.id())); + String.format( + "Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", + i + 1, steps.size(), step.id())); continue; } // Required step failed. Fail the entire upgrade process. upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. 
Failed after %s retries.", i + 1, steps.size(), step.id(), - step.retryCount())); + String.format( + "Failed Step %s/%s: %s. Failed after %s retries.", + i + 1, steps.size(), step.id(), step.retryCount())); upgradeReport.addLine(String.format("Exiting upgrade %s with failure.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.FAILED, upgradeReport); } - upgradeReport.addLine(String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); + upgradeReport.addLine( + String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); } - upgradeReport.addLine(String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); + upgradeReport.addLine( + String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.SUCCEEDED, upgradeReport); } @@ -105,15 +121,19 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte UpgradeStepResult result = null; int maxAttempts = retryCount + 1; for (int i = 0; i < maxAttempts; i++) { - try (Timer.Context completionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { - try (Timer.Context executionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { + try (Timer.Context completionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { + try (Timer.Context executionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { result = step.executable().apply(context); } if (result == null) { // Failed to even retrieve a result. Create a default failure result. 
result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); - context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); + context + .report() + .addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); MetricUtils.counter(MetricRegistry.name(step.id(), "retry")).inc(); } @@ -122,9 +142,12 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { - context.report() + log.error("Caught exception during attempt {} of Step with id {}", i, step.id(), e); + context + .report() .addLine( - String.format("Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); + String.format( + "Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); MetricUtils.counter(MetricRegistry.name(step.id(), "failed")).inc(); result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); @@ -139,7 +162,11 @@ private void executeCleanupInternal(UpgradeContext context, UpgradeResult result try { step.executable().accept(context, result); } catch (Exception e) { - context.report().addLine(String.format("Caught exception while executing cleanup step with id %s", step.id())); + context + .report() + .addLine( + String.format( + "Caught exception while executing cleanup step with id %s", step.id())); } } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java index 19706937e20ca8..913b0ff20e6ff9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java @@ -1,10 +1,9 @@ package com.linkedin.datahub.upgrade.impl; import 
com.linkedin.datahub.upgrade.UpgradeReport; -import lombok.extern.slf4j.Slf4j; import java.util.ArrayList; import java.util.List; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class DefaultUpgradeReport implements UpgradeReport { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java index 6ecb5228482917..cf0e7221b406b0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java @@ -3,7 +3,6 @@ import com.linkedin.datahub.upgrade.UpgradeReport; import com.linkedin.datahub.upgrade.UpgradeResult; - public class DefaultUpgradeResult implements UpgradeResult { private final Result _result; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java index d0c086f607edd2..e11eaf89bfc8d2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; - public class DefaultUpgradeStepResult implements UpgradeStepResult { private final String _stepId; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java index 7ed7169bf20bcc..3b3098f43c4734 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; 
-import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import io.ebean.Database; import java.util.function.Function; @@ -36,40 +36,42 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - - DbType targetDbType = context.parsedArgs().containsKey(DB_TYPE_ARG) - ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) - : DbType.MYSQL; + DbType targetDbType = + context.parsedArgs().containsKey(DB_TYPE_ARG) + ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) + : DbType.MYSQL; String sqlUpdateStr; switch (targetDbType) { case POSTGRES: - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint not null,\n" - + " metadata text not null,\n" - + " systemmetadata text,\n" - + " createdon timestamp not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint not null,\n" + + " metadata text not null,\n" + + " systemmetadata text,\n" + + " createdon timestamp not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; default: // both mysql and maria - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint(20) not null,\n" - + " metadata longtext not null,\n" - + " systemmetadata 
longtext,\n" - + " createdon datetime(6) not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint(20) not null,\n" + + " metadata longtext not null,\n" + + " systemmetadata longtext,\n" + + " createdon datetime(6) not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; } @@ -77,9 +79,7 @@ public Function executable() { _server.execute(_server.createSqlUpdate(sqlUpdateStr)); } catch (Exception e) { context.report().addLine("Failed to create table metadata_aspect_v2", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index 1b5770a11ff62c..94bf97f3c9c9e7 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -1,22 +1,23 @@ package com.linkedin.datahub.upgrade.nocode; +import com.datahub.util.RecordUtils; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import 
com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.utils.PegasusUtils; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.util.Pair; import io.ebean.Database; import io.ebean.PagedList; @@ -29,13 +30,13 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; - public class DataMigrationStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; private static final long DEFAULT_BATCH_DELAY_MS = 250; - private static final String BROWSE_PATHS_ASPECT_NAME = PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); + private static final String BROWSE_PATHS_ASPECT_NAME = + PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); private final Database _server; private final EntityService _entityService; @@ -64,7 +65,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - context.report().addLine("Starting data migration..."); final int rowCount = _server.find(EbeanAspectV1.class).findCount(); context.report().addLine(String.format("Found %s rows in legacy aspects table", rowCount)); @@ -74,7 +74,11 @@ public Function executable() { int count = getBatchSize(context.parsedArgs()); while (start < rowCount) { - context.report().addLine(String.format("Reading rows %s through %s from legacy aspects 
table.", start, start + count)); + context + .report() + .addLine( + String.format( + "Reading rows %s through %s from legacy aspects table.", start, start + count)); PagedList rows = getPagedAspects(start, count); for (EbeanAspectV1 oldAspect : rows.getList()) { @@ -84,11 +88,18 @@ public Function executable() { // 1. Instantiate the RecordTemplate class associated with the aspect. final RecordTemplate aspectRecord; try { - aspectRecord = RecordUtils.toRecordTemplate( - Class.forName(oldAspectName).asSubclass(RecordTemplate.class), - oldAspect.getMetadata()); + aspectRecord = + RecordUtils.toRecordTemplate( + Class.forName(oldAspectName).asSubclass(RecordTemplate.class), + oldAspect.getMetadata()); } catch (Exception e) { - context.report().addLine(String.format("Failed to convert aspect with name %s into a RecordTemplate class", oldAspectName), e); + context + .report() + .addLine( + String.format( + "Failed to convert aspect with name %s into a RecordTemplate class", + oldAspectName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -97,7 +108,11 @@ public Function executable() { try { urn = Urn.createFromString(oldAspect.getKey().getUrn()); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to bind Urn with value %s into Urn object", oldAspect.getKey().getUrn()), e); + throw new RuntimeException( + String.format( + "Failed to bind Urn with value %s into Urn object", + oldAspect.getKey().getUrn()), + e); } // 3. Verify that the entity associated with the aspect is found in the registry. 
@@ -106,7 +121,12 @@ public Function executable() { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find Entity with name %s in Entity Registry", entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -115,9 +135,13 @@ public Function executable() { try { newAspectName = PegasusUtils.getAspectNameFromSchema(aspectRecord.schema()); } catch (Exception e) { - context.report().addLine(String.format("Failed to retrieve @Aspect name from schema %s, urn %s", - aspectRecord.schema().getFullName(), - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to retrieve @Aspect name from schema %s, urn %s", + aspectRecord.schema().getFullName(), entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -126,33 +150,45 @@ public Function executable() { try { aspectSpec = entitySpec.getAspectSpec(newAspectName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - newAspectName, - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + newAspectName, entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } // 6. Write the row back using the EntityService boolean emitMae = oldAspect.getKey().getVersion() == 0L; _entityService.ingestAspects( + context.opContext(), urn, List.of(Pair.of(newAspectName, aspectRecord)), toAuditStamp(oldAspect), - null - ); + null); // 7. If necessary, emit a browse path aspect. 
- if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) && !urnsWithBrowsePath.contains(urn)) { + if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) + && !urnsWithBrowsePath.contains(urn)) { // Emit a browse path aspect. final BrowsePaths browsePaths; try { - browsePaths = _entityService.buildDefaultBrowsePath(urn); + browsePaths = + DefaultAspectsUtil.buildDefaultBrowsePath( + context.opContext(), urn, _entityService); final AuditStamp browsePathsStamp = new AuditStamp(); browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); browsePathsStamp.setTime(System.currentTimeMillis()); - _entityService.ingestAspects(urn, List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), browsePathsStamp, null); + _entityService.ingestAspects( + context.opContext(), + urn, + List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), + browsePathsStamp, + null); urnsWithBrowsePath.add(urn); } catch (URISyntaxException e) { @@ -167,13 +203,17 @@ public Function executable() { try { TimeUnit.MILLISECONDS.sleep(getBatchDelayMs(context.parsedArgs())); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } } if (totalRowsMigrated != rowCount) { - context.report().addLine(String.format("Number of rows migrated %s does not equal the number of input rows %s...", - totalRowsMigrated, - rowCount)); + context + .report() + .addLine( + String.format( + "Number of rows migrated %s does not equal the number of input rows %s...", + totalRowsMigrated, rowCount)); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -195,9 +235,9 @@ private AuditStamp toAuditStamp(final EbeanAspectV1 aspect) { return auditStamp; } - private PagedList getPagedAspects(final 
int start, final int pageSize) { - return _server.find(EbeanAspectV1.class) + return _server + .find(EbeanAspectV1.class) .select(EbeanAspectV1.ALL_COLUMNS) .setFirstRow(start) .setMaxRows(pageSize) @@ -219,7 +259,8 @@ private long getBatchDelayMs(final Map> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (parsedArgs.containsKey(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME) && parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).isPresent()) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index a299deb8747212..1524a015e414e4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -6,13 +6,14 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep; import com.linkedin.datahub.upgrade.common.steps.GMSQualificationStep; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; public class NoCodeUpgrade implements Upgrade { @@ -26,15 +27,17 @@ public class NoCodeUpgrade implements Upgrade { // Upgrade requires the Database. 
public NoCodeUpgrade( - final Database server, - final EntityService entityService, + @Nullable final Database server, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps( - server, entityService, - entityRegistry, - entityClient); - _cleanupSteps = buildCleanupSteps(); + final SystemEntityClient entityClient) { + if (server != null) { + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override @@ -58,9 +61,9 @@ private List buildCleanupSteps() { private List buildUpgradeSteps( final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient) { + final SystemEntityClient entityClient) { final List steps = new ArrayList<>(); steps.add(new RemoveAspectV2TableStep(server)); steps.add(new GMSQualificationStep(ImmutableMap.of("noCode", "true"))); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java index cf8e848762f143..6180573d902d22 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java @@ -7,10 +7,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. 
*/ public class RemoveAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java index 0fe9afa8cc6f83..d22af9d2924003 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; import io.ebean.Database; import java.util.function.Function; @@ -29,7 +29,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - if (context.parsedArgs().containsKey(NoCodeUpgrade.FORCE_UPGRADE_ARG_NAME)) { context.report().addLine("Forced upgrade detected. Proceeding with upgrade..."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -43,7 +42,8 @@ public Function executable() { } // Unqualified (Table already exists) context.report().addLine("Failed to qualify upgrade candidate. 
Aborting the upgrade..."); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); + return new DefaultUpgradeStepResult( + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); @@ -67,8 +67,13 @@ private boolean isQualified(Database server, UpgradeContext context) { return true; } context.report().addLine(String.format("-- V2 table has %d rows", v2TableRowCount)); - context.report().addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); - context.report().addLine("-- If V2 table has significantly less rows, consider running the forced upgrade. "); + context + .report() + .addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); + context + .report() + .addLine( + "-- If V2 table has significantly less rows, consider running the forced upgrade. "); return false; } context.report().addLine("-- V2 table does not exist"); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java index 8005e31e01c671..ba0a0124545e9d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java @@ -7,7 +7,6 @@ import io.ebean.Database; import java.util.function.Function; - // Do we need SQL-tech specific migration paths? 
public class DeleteAspectTableStep implements UpgradeStep { @@ -34,9 +33,7 @@ public Function executable() { _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); } catch (Exception e) { context.report().addLine("Failed to delete data from legacy table metadata_aspect", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java index 12ff125a05127c..5066e05f8bf5a1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java @@ -6,10 +6,8 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; // Do we need SQL-tech specific migration paths? 
@Slf4j @@ -44,9 +42,7 @@ public Function executable() { } } catch (Exception e) { context.report().addLine("Failed to delete legacy data from graph", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java index 9a64d5fe1810c9..05656373377b93 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java @@ -11,7 +11,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; - // Do we need SQL-tech specific migration paths? 
@RequiredArgsConstructor public class DeleteLegacySearchIndicesStep implements UpgradeStep { @@ -20,7 +19,8 @@ public class DeleteLegacySearchIndicesStep implements UpgradeStep { private final RestHighLevelClient _searchClient; - public DeleteLegacySearchIndicesStep(final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public DeleteLegacySearchIndicesStep( + final RestHighLevelClient searchClient, final IndexConvention indexConvention) { _searchClient = searchClient; deletePattern = indexConvention.getPrefix().map(p -> p + "_").orElse("") + "*document*"; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index a5d8d6ce9b666f..6d3125423b4433 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -9,19 +9,27 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; import org.opensearch.client.RestHighLevelClient; - public class NoCodeCleanupUpgrade implements Upgrade { private final List _steps; private final List _cleanupSteps; // Upgrade requires the Database. 
- public NoCodeCleanupUpgrade(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { - _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); - _cleanupSteps = buildCleanupSteps(); + public NoCodeCleanupUpgrade( + @Nullable final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { + if (server != null) { + _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override @@ -43,8 +51,11 @@ private List buildCleanupSteps() { return Collections.emptyList(); } - private List buildUpgradeSteps(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + private List buildUpgradeSteps( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { final List steps = new ArrayList<>(); steps.add(new NoCodeUpgradeQualificationStep(server)); steps.add(new DeleteAspectTableStep(server)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java index 67a226f8f0676c..15c7584532e2ca 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java @@ -8,7 +8,6 @@ import io.ebean.Database; import java.util.function.Function; - public class NoCodeUpgradeQualificationStep implements UpgradeStep { private final Database _server; @@ -33,23 +32,19 @@ public Function 
executable() { try { if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { // Unqualified (V2 Table does not exist) - context.report().addLine("You have not successfully migrated yet. Aborting the cleanup..."); + context + .report() + .addLine("You have not successfully migrated yet. Aborting the cleanup..."); return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED, - UpgradeStepResult.Action.ABORT); + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } else { // Qualified. context.report().addLine("Found qualified upgrade candidate. Proceeding with upgrade..."); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists: %s", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index b55d439745e691..6054599aa843c1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -11,14 +11,13 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements UpgradeStep { private static final String INVALID_CLIENT_ID_ASPECT = "clientId"; - private final EntityService _entityService; + private final EntityService _entityService; @Override public String id() { @@ -33,9 +32,14 @@ public 
boolean skip(UpgradeContext context) { @Override public Function executable() { return upgradeContext -> { - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, - new HashMap<>(), true); - return (UpgradeStepResult) new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + _entityService.deleteAspect( + upgradeContext.opContext(), + TelemetryUtils.CLIENT_ID_URN, + INVALID_CLIENT_ID_ASPECT, + new HashMap<>(), + true); + return (UpgradeStepResult) + new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index f8af69dba08653..3ea449051b3558 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -8,12 +8,11 @@ import java.util.ArrayList; import java.util.List; - public class RemoveUnknownAspects implements Upgrade { private final List _steps; - public RemoveUnknownAspects(final EntityService entityService) { + public RemoveUnknownAspects(final EntityService entityService) { _steps = buildSteps(entityService); } @@ -27,7 +26,7 @@ public List steps() { return _steps; } - private List buildSteps(final EntityService entityService) { + private List buildSteps(final EntityService entityService) { final List steps = new ArrayList<>(); steps.add(new RemoveClientIdAspectStep(entityService)); return steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java index 0303739e62afec..addf6dcb89c1ae 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java @@ -8,10 +8,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. */ public class ClearAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index 9175ad606e3c87..7496655e581b09 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -6,30 +6,45 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.common.steps.ClearGraphServiceStep; import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep; +import com.linkedin.datahub.upgrade.common.steps.ClearSystemMetadataServiceStep; import com.linkedin.datahub.upgrade.common.steps.GMSDisableWriteModeStep; import com.linkedin.datahub.upgrade.common.steps.GMSEnableWriteModeStep; -import com.linkedin.entity.client.SystemRestliEntityClient; +import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; import io.ebean.Database; import java.util.ArrayList; import java.util.List; - +import javax.annotation.Nullable; public class RestoreBackup implements Upgrade { private final List _steps; public RestoreBackup( - final 
Database server, - final EntityService entityService, + @Nullable final Database server, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient, - final GraphService graphClient, - final EntitySearchService searchClient) { - _steps = buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + final SystemEntityClient systemEntityClient, + final SystemMetadataService systemMetadataService, + final EntitySearchService entitySearchService, + final GraphService graphClient) { + if (server != null) { + _steps = + buildSteps( + server, + entityService, + entityRegistry, + systemEntityClient, + systemMetadataService, + entitySearchService, + graphClient); + } else { + _steps = List.of(); + } } @Override @@ -44,18 +59,20 @@ public List steps() { private List buildSteps( final Database server, - final EntityService entityService, + final EntityService entityService, final EntityRegistry entityRegistry, - final SystemRestliEntityClient entityClient, - final GraphService graphClient, - final EntitySearchService searchClient) { + final SystemEntityClient systemEntityClient, + final SystemMetadataService systemMetadataService, + final EntitySearchService entitySearchService, + final GraphService graphClient) { final List steps = new ArrayList<>(); - steps.add(new GMSDisableWriteModeStep(entityClient)); - steps.add(new ClearSearchServiceStep(searchClient, true)); + steps.add(new GMSDisableWriteModeStep(systemEntityClient)); + steps.add(new ClearSystemMetadataServiceStep(systemMetadataService, true)); + steps.add(new ClearSearchServiceStep(entitySearchService, true)); steps.add(new ClearGraphServiceStep(graphClient, true)); steps.add(new ClearAspectV2TableStep(server)); steps.add(new RestoreStorageStep(entityService, entityRegistry)); - steps.add(new GMSEnableWriteModeStep(entityClient)); + steps.add(new GMSEnableWriteModeStep(systemEntityClient)); return steps; } diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 42f7f0073e59b5..eb0b24acc1ac37 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; @@ -35,19 +34,20 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class RestoreStorageStep implements UpgradeStep { private static final int REPORT_BATCH_SIZE = 1000; private static final int DEFAULT_THREAD_POOL = 4; - private final EntityService _entityService; + private final EntityService _entityService; private final EntityRegistry _entityRegistry; - private final Map>>> _backupReaders; + private final Map>>> + _backupReaders; private final ExecutorService _fileReaderThreadPool; private final ExecutorService _gmsThreadPool; - public RestoreStorageStep(final EntityService entityService, final EntityRegistry entityRegistry) { + public RestoreStorageStep( + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); @@ -82,7 +82,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - context.report().addLine("Starting backup restore..."); int numRows = 0; Optional backupReaderName = context.parsedArgs().get("BACKUP_READER"); @@ -93,19 +92,32 @@ public Function executable() { return new 
DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } - Class> clazz = _backupReaders.get(backupReaderName.get()); + Class> clazz = + _backupReaders.get(backupReaderName.get()); List argNames = BackupReaderArgs.getArgNames(clazz); - List> args = argNames.stream().map(argName -> context.parsedArgs().get(argName)).collect( - Collectors.toList()); + List> args = + argNames.stream() + .map(argName -> context.parsedArgs().get(argName)) + .collect(Collectors.toList()); BackupReader backupReader; try { backupReader = clazz.getConstructor(List.class).newInstance(args); - } catch (InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { + } catch (InstantiationException + | InvocationTargetException + | IllegalAccessException + | NoSuchMethodException e) { e.printStackTrace(); - context.report().addLine("Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); - throw new IllegalArgumentException("Invalid BackupReader: " + clazz.getSimpleName() + ", need to implement proper constructor."); + context + .report() + .addLine( + "Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); + throw new IllegalArgumentException( + "Invalid BackupReader: " + + clazz.getSimpleName() + + ", need to implement proper constructor."); } - EbeanAspectBackupIterator iterator = backupReader.getBackupIterator(context); + EbeanAspectBackupIterator iterator = + backupReader.getBackupIterator(context); ReaderWrapper reader; List> futureList = new ArrayList<>(); while ((reader = iterator.getNextReader()) != null) { @@ -138,9 +150,12 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), e); + String.format( + "Failed to bind Urn with 
value %s into Urn object", aspect.getKey().getUrn()), + e); continue; } @@ -150,8 +165,11 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format("Failed to find Entity with name %s in Entity Registry", entityName), + e); continue; } final String aspectName = aspect.getKey().getAspect(); @@ -160,11 +178,18 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final RecordTemplate aspectRecord; try { aspectRecord = - EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + EntityUtils.toSystemAspect( + context.opContext().getRetrieverContext().get(), aspect.toEntityAspect()) + .get() + .getRecordTemplate(); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to create aspect record with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to create aspect record with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } @@ -173,17 +198,31 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { aspectSpec = entitySpec.getAspectSpec(aspectName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } // 5. 
Write the row back using the EntityService final long version = aspect.getKey().getVersion(); final AuditStamp auditStamp = toAuditStamp(aspect); - futureList.add(_gmsThreadPool.submit(() -> - _entityService.ingestAspects(urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null).get(0).getNewValue())); + futureList.add( + _gmsThreadPool.submit( + () -> + _entityService + .ingestAspects( + context.opContext(), + urn, + List.of(Pair.of(aspectName, aspectRecord)), + auditStamp, + null) + .get(0) + .getNewValue())); if (numRows % REPORT_BATCH_SIZE == 0) { for (Future future : futureList) { try { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 7ea1811adfdd81..c6839c0e63f055 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -3,12 +3,13 @@ import com.linkedin.datahub.upgrade.UpgradeContext; import javax.annotation.Nonnull; - /** - * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 object to be - * ingested back into GMS. Must have a constructor that takes a List of Optional Strings + * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 + * object to be ingested back into GMS. 
Must have a constructor that takes a List of Optional + * Strings */ public interface BackupReader { + String getName(); @Nonnull diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java index 20f43b5414ddd7..6176d56fbec958 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java @@ -4,14 +4,9 @@ import java.util.List; import java.util.Map; - -/** - * Retains a map of what arguments are passed in to a backup reader - */ +/** Retains a map of what arguments are passed in to a backup reader */ public final class BackupReaderArgs { - private BackupReaderArgs() { - - } + private BackupReaderArgs() {} private static final Map, List> ARGS_MAP; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java index 3a2505311e2450..cce5928277a20d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java @@ -7,10 +7,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * Base interface for iterators that retrieves EbeanAspectV2 objects - * This allows us to restore from backups of various format + * Base interface for iterators that retrieves EbeanAspectV2 objects This allows us to restore from + * backups of various format */ @Slf4j @RequiredArgsConstructor @@ -35,12 +34,13 @@ public T getNextReader() { @Override public void 
close() { - _readers.forEach(reader -> { - try { - reader.close(); - } catch (IOException e) { - log.error("Error while closing parquet reader", e); - } - }); + _readers.forEach( + reader -> { + try { + reader.close(); + } catch (IOException e) { + log.error("Error while closing parquet reader", e); + } + }); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java index 9b8a3133ac04cb..9f0f81f466cfa4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java @@ -14,10 +14,7 @@ import org.apache.parquet.avro.AvroParquetReader; import org.apache.parquet.hadoop.ParquetReader; - -/** - * BackupReader for retrieving EbeanAspectV2 objects from a local parquet file - */ +/** BackupReader for retrieving EbeanAspectV2 objects from a local parquet file */ @Slf4j public class LocalParquetReader implements BackupReader { @@ -46,16 +43,20 @@ public String getName() { public EbeanAspectBackupIterator getBackupIterator(UpgradeContext context) { Optional path = context.parsedArgs().get("BACKUP_FILE_PATH"); if (!path.isPresent()) { - context.report().addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); + context + .report() + .addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); throw new IllegalArgumentException( "BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); } try { - ParquetReader reader = AvroParquetReader.builder(new Path(path.get())).build(); - return new EbeanAspectBackupIterator<>(ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); + ParquetReader reader = + AvroParquetReader.builder(new 
Path(path.get())).build(); + return new EbeanAspectBackupIterator<>( + ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); } catch (IOException e) { throw new RuntimeException(String.format("Failed to build ParquetReader: %s", e)); } } -} \ No newline at end of file +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java index 2b7cacff652495..01c502221f77f9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java @@ -15,9 +15,9 @@ @Slf4j public class ParquetReaderWrapper extends ReaderWrapper { - private final static long NANOS_PER_MILLISECOND = 1000000; - private final static long MILLIS_IN_DAY = 86400000; - private final static long JULIAN_EPOCH_OFFSET_DAYS = 2440588; + private static final long NANOS_PER_MILLISECOND = 1000000; + private static final long MILLIS_IN_DAY = 86400000; + private static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588; private final ParquetReader _parquetReader; @@ -45,22 +45,30 @@ EbeanAspectV2 convertRecord(GenericRecord record) { ts = (Long) record.get("createdon"); } - return new EbeanAspectV2(record.get("urn").toString(), record.get("aspect").toString(), - (Long) record.get("version"), record.get("metadata").toString(), - Timestamp.from(Instant.ofEpochMilli(ts / 1000)), record.get("createdby").toString(), + return new EbeanAspectV2( + record.get("urn").toString(), + record.get("aspect").toString(), + (Long) record.get("version"), + record.get("metadata").toString(), + Timestamp.from(Instant.ofEpochMilli(ts / 1000)), + record.get("createdby").toString(), Optional.ofNullable(record.get("createdfor")).map(Object::toString).orElse(null), 
Optional.ofNullable(record.get("systemmetadata")).map(Object::toString).orElse(null)); } private long convertFixed96IntToTs(GenericFixed createdon) { // From https://github.com/apache/parquet-format/pull/49/filesParquetTimestampUtils.java - // and ParquetTimestampUtils.java from https://github.com/kube-reporting/presto/blob/master/presto-parquet/ + // and ParquetTimestampUtils.java from + // https://github.com/kube-reporting/presto/blob/master/presto-parquet/ // src/main/java/io/prestosql/parquet/ParquetTimestampUtils.java byte[] bytes = createdon.bytes(); // little endian encoding - need to invert byte order - long timeOfDayNanos = Longs.fromBytes(bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); + long timeOfDayNanos = + Longs.fromBytes( + bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); int julianDay = Ints.fromBytes(bytes[11], bytes[10], bytes[9], bytes[8]); - return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + (timeOfDayNanos / NANOS_PER_MILLISECOND); + return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + + (timeOfDayNanos / NANOS_PER_MILLISECOND); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java index d0db42e678eea2..48d0fa2fda04c3 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java @@ -5,9 +5,10 @@ import java.io.IOException; import lombok.extern.slf4j.Slf4j; - /** - * Abstract class that reads entries from a given source and transforms then into {@link EbeanAspectV2} instances. + * Abstract class that reads entries from a given source and transforms then into {@link + * EbeanAspectV2} instances. 
+ * * @param The object type to read from a reader source. */ @Slf4j @@ -69,9 +70,15 @@ record = read(); abstract EbeanAspectV2 convertRecord(T record); private void printStat(String prefix) { - log.info("{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," - + " records failed: {}, Total millis in convert: {}", prefix, _fileName, - recordsProcessed, totalTimeSpentInRead / 1000 / 1000, recordsSkipped, recordsFailed, + log.info( + "{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," + + " records failed: {}, Total millis in convert: {}", + prefix, + _fileName, + recordsProcessed, + totalTimeSpentInRead / 1000 / 1000, + recordsSkipped, + recordsFailed, totalTimeSpentInConvert / 1000 / 1000); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 3c0a9762a28c92..9d239a56224862 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -6,14 +6,15 @@ import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.common.steps.ClearGraphServiceStep; import com.linkedin.datahub.upgrade.common.steps.ClearSearchServiceStep; +import com.linkedin.datahub.upgrade.common.steps.ClearSystemMetadataServiceStep; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.EntitySearchService; +import com.linkedin.metadata.systemmetadata.SystemMetadataService; import io.ebean.Database; import java.util.ArrayList; import java.util.List; - +import javax.annotation.Nullable; public class RestoreIndices implements Upgrade { public 
static final String BATCH_SIZE_ARG_NAME = "batchSize"; @@ -24,15 +25,25 @@ public class RestoreIndices implements Upgrade { public static final String WRITER_POOL_SIZE = "WRITER_POOL_SIZE"; public static final String URN_ARG_NAME = "urn"; public static final String URN_LIKE_ARG_NAME = "urnLike"; + public static final String URN_BASED_PAGINATION_ARG_NAME = "urnBasedPagination"; public static final String STARTING_OFFSET_ARG_NAME = "startingOffset"; private final List _steps; - public RestoreIndices(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + public RestoreIndices( + @Nullable final Database server, + final EntityService entityService, + final SystemMetadataService systemMetadataService, + final EntitySearchService entitySearchService, final GraphService graphService) { - _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + if (server != null) { + _steps = + buildSteps( + server, entityService, systemMetadataService, entitySearchService, graphService); + } else { + _steps = List.of(); + } } @Override @@ -45,13 +56,17 @@ public List steps() { return _steps; } - private List buildSteps(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + private List buildSteps( + final Database server, + final EntityService entityService, + final SystemMetadataService systemMetadataService, + final EntitySearchService entitySearchService, final GraphService graphService) { final List steps = new ArrayList<>(); + steps.add(new ClearSystemMetadataServiceStep(systemMetadataService, false)); steps.add(new ClearSearchServiceStep(entitySearchService, false)); steps.add(new ClearGraphServiceStep(graphService, false)); - steps.add(new SendMAEStep(server, entityService, entityRegistry)); + steps.add(new SendMAEStep(server, entityService)); return steps; } 
diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index 2ac4fea2e653ac..77d988f3176f29 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.restoreindices; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -8,10 +10,8 @@ import com.linkedin.metadata.entity.ebean.EbeanAspectV2; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; -import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import io.ebean.ExpressionList; - import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -22,10 +22,9 @@ import java.util.concurrent.Future; import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - +@Slf4j public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; @@ -33,24 +32,31 @@ public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_STARTING_OFFSET = 0; private static final int DEFAULT_THREADS = 1; + private static final boolean DEFAULT_URN_BASED_PAGINATION = false; private final Database _server; - private final EntityService _entityService; + private final EntityService _entityService; public class KafkaJob implements Callable { - UpgradeContext context; - RestoreIndicesArgs args; - public KafkaJob(UpgradeContext 
context, RestoreIndicesArgs args) { - this.context = context; - this.args = args; - } - @Override - public RestoreIndicesResult call() { - return _entityService.restoreIndices(args, context.report()::addLine); - } + UpgradeContext context; + RestoreIndicesArgs args; + + public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { + this.context = context; + this.args = args; + } + + @Override + public RestoreIndicesResult call() { + return _entityService + .restoreIndices(context.opContext(), args, context.report()::addLine) + .stream() + .findFirst() + .get(); + } } - public SendMAEStep(final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { + public SendMAEStep(final Database server, final EntityService entityService) { _server = server; _entityService = entityService; } @@ -67,13 +73,13 @@ public int retryCount() { private List iterateFutures(List> futures) { List result = new ArrayList<>(); - for (Future future: new ArrayList<>(futures)) { + for (Future future : new ArrayList<>(futures)) { if (future.isDone()) { try { result.add(future.get()); futures.remove(future); } catch (InterruptedException | ExecutionException e) { - e.printStackTrace(); + log.error("Error iterating futures", e); } } } @@ -83,26 +89,47 @@ private List iterateFutures(List countExp = - _server.find(EbeanAspectV2.class) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); + _server + .find(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); if (args.aspectName != null) { countExp = countExp.eq(EbeanAspectV2.ASPECT_COLUMN, args.aspectName); } @@ -120,45 +147,88 @@ public Function executable() { return (context) -> { RestoreIndicesResult finalJobResult = new RestoreIndicesResult(); RestoreIndicesArgs args = getArgs(context); - ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); + ThreadPoolExecutor executor = + (ThreadPoolExecutor) 
Executors.newFixedThreadPool(args.numThreads); context.report().addLine("Sending MAE from local DB"); long startTime = System.currentTimeMillis(); final int rowCount = getRowCount(args); - context.report().addLine(String.format("Found %s latest aspects in aspects table in %.2f minutes.", - rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); + context + .report() + .addLine( + String.format( + "Found %s latest aspects in aspects table in %.2f minutes.", + rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); int start = args.start; List> futures = new ArrayList<>(); startTime = System.currentTimeMillis(); - while (start < rowCount) { - args = args.clone(); - args.start = start; - futures.add(executor.submit(new KafkaJob(context, args))); - start = start + args.batchSize; - } - while (futures.size() > 0) { - List tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult: tmpResults) { - reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + if (args.urnBasedPagination) { + RestoreIndicesResult previousResult = null; + int rowsProcessed = 1; + while (rowsProcessed > 0) { + args = args.clone(); + if (previousResult != null) { + args.lastUrn = previousResult.lastUrn; + args.lastAspect = previousResult.lastAspect; + } + args.start = start; + context + .report() + .addLine( + String.format( + "Getting next batch of urns + aspects, starting with %s - %s", + args.lastUrn, args.lastAspect)); + Future future = executor.submit(new KafkaJob(context, args)); + try { + RestoreIndicesResult result = future.get(); + reportStats(context, finalJobResult, result, rowCount, startTime); + previousResult = result; + rowsProcessed = result.rowsMigrated + result.ignored; + context.report().addLine(String.format("Rows processed this loop %d", rowsProcessed)); + start += args.batchSize; + } catch (InterruptedException | ExecutionException e) { + return new DefaultUpgradeStepResult(id(), 
UpgradeStepResult.Result.FAILED); + } + } + } else { + while (start < rowCount) { + args = args.clone(); + args.start = start; + futures.add(executor.submit(new KafkaJob(context, args))); + start = start + args.batchSize; + } + while (futures.size() > 0) { + List tmpResults = iterateFutures(futures); + for (RestoreIndicesResult tmpResult : tmpResults) { + reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + } } } + executor.shutdown(); if (finalJobResult.rowsMigrated != rowCount) { float percentFailed = 0.0f; if (rowCount > 0) { percentFailed = (float) (rowCount - finalJobResult.rowsMigrated) * 100 / rowCount; } - context.report().addLine(String.format( - "Failed to send MAEs for %d rows (%.2f%% of total).", - rowCount - finalJobResult.rowsMigrated, percentFailed)); + context + .report() + .addLine( + String.format( + "Failed to send MAEs for %d rows (%.2f%% of total).", + rowCount - finalJobResult.rowsMigrated, percentFailed)); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } - private static void reportStats(UpgradeContext context, RestoreIndicesResult finalResult, RestoreIndicesResult tmpResult, - int rowCount, long startTime) { + private static void reportStats( + UpgradeContext context, + RestoreIndicesResult finalResult, + RestoreIndicesResult tmpResult, + int rowCount, + long startTime) { finalResult.ignored += tmpResult.ignored; finalResult.rowsMigrated += tmpResult.rowsMigrated; finalResult.timeSqlQueryMs += tmpResult.timeSqlQueryMs; @@ -178,11 +248,22 @@ private static void reportStats(UpgradeContext context, RestoreIndicesResult fin estimatedTimeMinutesComplete = timeSoFarMinutes * (100 - percentSent) / percentSent; } float totalTimeComplete = timeSoFarMinutes + estimatedTimeMinutesComplete; - context.report().addLine(String.format( - "Successfully sent MAEs for %s/%s rows (%.2f%% of total). 
%s rows ignored (%.2f%% of total)", - finalResult.rowsMigrated, rowCount, percentSent, finalResult.ignored, percentIgnored)); - context.report().addLine(String.format("%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", - timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); + context + .report() + .addLine( + String.format( + "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", + finalResult.rowsMigrated, + rowCount, + percentSent, + finalResult.ignored, + percentIgnored)); + context + .report() + .addLine( + String.format( + "%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", + timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); } private int getBatchSize(final Map> parsedArgs) { @@ -196,7 +277,8 @@ private int getStartingOffset(final Map> parsedArgs) { private long getBatchDelayMs(final Map> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (containsKey(parsedArgs, RestoreIndices.BATCH_DELAY_MS_ARG_NAME)) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } @@ -205,7 +287,17 @@ private int getThreadCount(final Map> parsedArgs) { return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME); } - private int getInt(final Map> parsedArgs, int defaultVal, String argKey) { + private boolean getUrnBasedPagination(final Map> parsedArgs) { + boolean urnBasedPagination = DEFAULT_URN_BASED_PAGINATION; + if (containsKey(parsedArgs, RestoreIndices.URN_BASED_PAGINATION_ARG_NAME)) { + urnBasedPagination = + Boolean.parseBoolean(parsedArgs.get(RestoreIndices.URN_BASED_PAGINATION_ARG_NAME).get()); + } + return urnBasedPagination; + } + + private int getInt( + final Map> parsedArgs, int defaultVal, String argKey) { int result = defaultVal; if 
(containsKey(parsedArgs, argKey)) { result = Integer.parseInt(parsedArgs.get(argKey).get()); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java new file mode 100644 index 00000000000000..27e98259c8beb5 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/AbstractMCLStep.java @@ -0,0 +1,152 @@ +package com.linkedin.datahub.upgrade.system; + +import static com.linkedin.metadata.Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.entity.ebean.PartitionedStream; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.metadata.utils.AuditStampUtils; +import com.linkedin.util.Pair; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; + +/** + * Generic upgrade step class for generating MCLs for a given aspect in order to update ES documents + */ +@Slf4j +public abstract class AbstractMCLStep implements UpgradeStep { + private final OperationContext opContext; + private final EntityService 
entityService; + private final AspectDao aspectDao; + + private final int batchSize; + private final int batchDelayMs; + private final int limit; + + public AbstractMCLStep( + OperationContext opContext, + EntityService entityService, + AspectDao aspectDao, + Integer batchSize, + Integer batchDelayMs, + Integer limit) { + this.opContext = opContext; + this.entityService = entityService; + this.aspectDao = aspectDao; + this.batchSize = batchSize; + this.batchDelayMs = batchDelayMs; + this.limit = limit; + } + + @Nonnull + protected abstract String getAspectName(); + + protected Urn getUpgradeIdUrn() { + return BootstrapStep.getUpgradeUrn(id()); + } + + /** Optionally apply an urn-like sql filter, otherwise all urns */ + @Nullable + protected abstract String getUrnLike(); + + @Override + public Function executable() { + return (context) -> { + + // re-using for configuring the sql scan + RestoreIndicesArgs args = + new RestoreIndicesArgs().aspectName(getAspectName()).batchSize(batchSize).limit(limit); + + if (getUrnLike() != null) { + args = args.urnLike(getUrnLike()); + } + + try (PartitionedStream stream = aspectDao.streamAspectBatches(args)) { + stream + .partition(args.batchSize) + .forEach( + batch -> { + log.info("Processing batch({}) of size {}.", getAspectName(), batchSize); + + List, Boolean>> futures; + + futures = + EntityUtils.toSystemAspectFromEbeanAspects( + opContext.getRetrieverContext().get(), + batch.collect(Collectors.toList())) + .stream() + .map( + systemAspect -> + entityService.alwaysProduceMCLAsync( + opContext, + systemAspect.getUrn(), + systemAspect.getUrn().getEntityType(), + getAspectName(), + systemAspect.getAspectSpec(), + null, + systemAspect.getRecordTemplate(), + null, + systemAspect + .getSystemMetadata() + .setRunId(id()) + .setLastObserved(System.currentTimeMillis()), + AuditStampUtils.createDefaultAuditStamp(), + ChangeType.UPSERT)) + .collect(Collectors.toList()); + + futures.forEach( + f -> { + try { + f.getFirst().get(); + } 
catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + + if (batchDelayMs > 0) { + log.info("Sleeping for {} ms", batchDelayMs); + try { + Thread.sleep(batchDelayMs); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + }); + } + + BootstrapStep.setUpgradeResult(opContext, getUpgradeIdUrn(), entityService); + context.report().addLine("State updated: " + getUpgradeIdUrn()); + + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + @Override + /** Returns whether the upgrade should be skipped. */ + public boolean skip(UpgradeContext context) { + boolean previouslyRun = + entityService.exists( + opContext, getUpgradeIdUrn(), DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + if (previouslyRun) { + log.info("{} was already run. Skipping.", id()); + } + return previouslyRun; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java new file mode 100644 index 00000000000000..4fae5b2239d11e --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/BlockingSystemUpgrade.java @@ -0,0 +1,5 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.Upgrade; + +public interface BlockingSystemUpgrade extends Upgrade {} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java new file mode 100644 index 00000000000000..fd83f1544a0982 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/NonBlockingSystemUpgrade.java @@ -0,0 +1,5 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.Upgrade; + +public interface NonBlockingSystemUpgrade extends Upgrade {} diff 
--git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index 4a8211f2cd4ace..ad1c6c98fa3fd1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -3,52 +3,48 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeCleanupStep; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; -import com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; -import com.linkedin.metadata.dao.producer.KafkaEventProducer; -import lombok.extern.slf4j.Slf4j; - +import java.util.LinkedList; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - +import javax.annotation.Nullable; +import lombok.Getter; +import lombok.NonNull; +import lombok.experimental.Accessors; +import lombok.extern.slf4j.Slf4j; +@Getter @Slf4j +@Accessors(fluent = true) public class SystemUpdate implements Upgrade { - private final List _preStartupUpgrades; - private final List _postStartupUpgrades; - private final List _steps; + private final List steps; + private final List cleanupSteps; - public SystemUpdate(final BuildIndices buildIndicesJob, final CleanIndices cleanIndicesJob, - final KafkaEventProducer kafkaEventProducer, final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + public SystemUpdate( + @NonNull final List blockingSystemUpgrades, + @NonNull final List nonBlockingSystemUpgrades, + @Nullable final DataHubStartupStep dataHubStartupStep) { - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new 
DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); - } + steps = new LinkedList<>(); + cleanupSteps = new LinkedList<>(); - @Override - public String id() { - return "SystemUpdate"; - } + // blocking upgrades + steps.addAll(blockingSystemUpgrades.stream().flatMap(up -> up.steps().stream()).toList()); + cleanupSteps.addAll( + blockingSystemUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()).toList()); - @Override - public List steps() { - return Stream.concat(Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), - _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); + // emit system update message if blocking upgrade(s) present + if (dataHubStartupStep != null && !blockingSystemUpgrades.isEmpty()) { + steps.add(dataHubStartupStep); } - @Override - public List cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); - } + // add non-blocking upgrades last + steps.addAll(nonBlockingSystemUpgrades.stream().flatMap(up -> up.steps().stream()).toList()); + cleanupSteps.addAll( + nonBlockingSystemUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()).toList()); + } + + @Override + public String id() { + return getClass().getSimpleName(); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java new file mode 100644 index 00000000000000..32841149c467b3 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateBlocking.java @@ -0,0 +1,16 @@ +package com.linkedin.datahub.upgrade.system; + +import 
com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; +import java.util.List; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +public class SystemUpdateBlocking extends SystemUpdate { + + public SystemUpdateBlocking( + @NonNull List blockingSystemUpgrades, + @NonNull List nonBlockingSystemUpgrades, + @Nullable DataHubStartupStep dataHubStartupStep) { + super(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java new file mode 100644 index 00000000000000..3309babc1f6cf2 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdateNonBlocking.java @@ -0,0 +1,16 @@ +package com.linkedin.datahub.upgrade.system; + +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; +import java.util.List; +import lombok.NonNull; +import org.jetbrains.annotations.Nullable; + +public class SystemUpdateNonBlocking extends SystemUpdate { + + public SystemUpdateNonBlocking( + @NonNull List blockingSystemUpgrades, + @NonNull List nonBlockingSystemUpgrades, + @Nullable DataHubStartupStep dataHubStartupStep) { + super(blockingSystemUpgrades, nonBlockingSystemUpgrades, dataHubStartupStep); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java new file mode 100644 index 00000000000000..16c039e2a64abd --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2.java @@ -0,0 +1,41 @@ +package com.linkedin.datahub.upgrade.system.browsepaths; + +import com.google.common.collect.ImmutableList; +import 
com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; + +public class BackfillBrowsePathsV2 implements NonBlockingSystemUpgrade { + + private final List _steps; + + public BackfillBrowsePathsV2( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + if (enabled) { + _steps = + ImmutableList.of( + new BackfillBrowsePathsV2Step( + opContext, entityService, searchService, reprocessEnabled, batchSize)); + } else { + _steps = ImmutableList.of(); + } + } + + @Override + public String id() { + return "BackfillBrowsePathsV2"; + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java new file mode 100644 index 00000000000000..a1d559d05ad2fb --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/browsepaths/BackfillBrowsePathsV2Step.java @@ -0,0 +1,251 @@ +package com.linkedin.datahub.upgrade.system.browsepaths; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.SystemMetadataUtils.createDefaultSystemMetadata; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.BrowsePathsV2; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; 
+import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.aspect.utils.DefaultAspectsUtil; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.context.OperationContext; +import java.util.Set; +import java.util.function.Function; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class BackfillBrowsePathsV2Step implements UpgradeStep { + + private static final String UPGRADE_ID = "BackfillBrowsePathsV2Step"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; + + private static final Set ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); + + private final OperationContext opContext; + private final EntityService entityService; + private final SearchService 
searchService; + + private final boolean reprocessEnabled; + private final Integer batchSize; + + public BackfillBrowsePathsV2Step( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean reprocessEnabled, + Integer batchSize) { + this.opContext = opContext; + this.searchService = searchService; + this.entityService = entityService; + this.reprocessEnabled = reprocessEnabled; + this.batchSize = batchSize; + } + + @Override + public Function executable() { + return (context) -> { + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + String scrollId = null; + for (String entityType : ENTITY_TYPES_TO_MIGRATE) { + int migratedCount = 0; + do { + log.info( + String.format( + "Upgrading batch %s-%s of browse paths for entity type %s", + migratedCount, migratedCount + batchSize, entityType)); + scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); + migratedCount += batchSize; + } while (scrollId != null); + } + + BootstrapStep.setUpgradeResult(context.opContext(), UPGRADE_ID_URN, entityService); + + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, String scrollId) { + + final Filter filter; + + if (reprocessEnabled) { + filter = backfillDefaultBrowsePathsV2Filter(); + } else { + filter = backfillBrowsePathsV2Filter(); + } + + final ScrollResult scrollResult = + searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), + ImmutableList.of(entityType), + "*", + filter, + null, + scrollId, + null, + batchSize); + + if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { + return null; + } + + for (SearchEntity searchEntity : scrollResult.getEntities()) 
{ + try { + ingestBrowsePathsV2(opContext, searchEntity.getEntity(), auditStamp); + } catch (Exception e) { + // don't stop the whole step because of one bad urn or one bad ingestion + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); + } + } + + return scrollResult.getScrollId(); + } + + private Filter backfillBrowsePathsV2Filter() { + // Condition: has `browsePaths` AND does NOT have `browsePathV2` + Criterion missingBrowsePathV2 = new Criterion(); + missingBrowsePathV2.setCondition(Condition.IS_NULL); + missingBrowsePathV2.setField("browsePathV2"); + // Excludes entities without browsePaths + Criterion hasBrowsePathV1 = new Criterion(); + hasBrowsePathV1.setCondition(Condition.EXISTS); + hasBrowsePathV1.setField("browsePaths"); + + CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(missingBrowsePathV2); + criterionArray.add(hasBrowsePathV1); + + ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + conjunctiveCriterionArray.add(conjunctiveCriterion); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private Filter backfillDefaultBrowsePathsV2Filter() { + // Condition: has default `browsePathV2` + Criterion hasDefaultBrowsePathV2 = new Criterion(); + hasDefaultBrowsePathV2.setCondition(Condition.EQUAL); + hasDefaultBrowsePathV2.setField("browsePathV2"); + StringArray values = new StringArray(); + values.add(DEFAULT_BROWSE_PATH_V2); + hasDefaultBrowsePathV2.setValues(values); + hasDefaultBrowsePathV2.setValue(DEFAULT_BROWSE_PATH_V2); // not used, but required field? 
+ + CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(hasDefaultBrowsePathV2); + + ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + + ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); + conjunctiveCriterionArray.add(conjunctiveCriterion); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private void ingestBrowsePathsV2( + @Nonnull OperationContext opContext, Urn urn, AuditStamp auditStamp) throws Exception { + BrowsePathsV2 browsePathsV2 = + DefaultAspectsUtil.buildDefaultBrowsePathV2(opContext, urn, true, entityService); + log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); + MetadataChangeProposal proposal = new MetadataChangeProposal(); + proposal.setEntityUrn(urn); + proposal.setEntityType(urn.getEntityType()); + proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); + proposal.setChangeType(ChangeType.UPSERT); + proposal.setSystemMetadata(createDefaultSystemMetadata()); + proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); + entityService.ingestProposal(opContext, proposal, auditStamp, true); + } + + @Override + public String id() { + return UPGRADE_ID; + } + + /** + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. + */ + @Override + public boolean isOptional() { + return true; + } + + @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variables REPROCESS_DEFAULT_BROWSE_PATHS_V2 & BACKFILL_BROWSE_PATHS_V2 to determine whether to + * skip. 
+ */ + public boolean skip(UpgradeContext context) { + boolean envEnabled = Boolean.parseBoolean(System.getenv("BACKFILL_BROWSE_PATHS_V2")); + + if (reprocessEnabled && envEnabled) { + return false; + } + + boolean previouslyRun = + entityService.exists( + context.opContext(), UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + if (previouslyRun) { + log.info("{} was already run. Skipping.", id()); + } + return (previouslyRun || !envEnabled); + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index 1da5b6d6a25cee..a91bba2fa0a976 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -1,65 +1,131 @@ package com.linkedin.datahub.upgrade.system.elasticsearch; -import com.linkedin.datahub.upgrade.Upgrade; +import static com.linkedin.metadata.Constants.STATUS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME; +import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTY_ENTITY_NAME; + +import com.datahub.util.RecordUtils; +import com.linkedin.common.Status; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import 
com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.entity.EntityAspect; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.util.ArrayList; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +public class BuildIndices implements BlockingSystemUpgrade { -public class BuildIndices implements Upgrade { + private final List _steps; - private final List _steps; + public BuildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao) { - public BuildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, + List indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); - final ConfigurationProvider configurationProvider) { + _steps = + buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider, 
aspectDao); + } + @Override + public String id() { + return "BuildIndices"; + } - List indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); + @Override + public List steps() { + return _steps; + } - _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); - } + private List buildSteps( + final List indexedServices, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao) { - @Override - public String id() { - return "BuildIndices"; + final Set> structuredProperties; + if (configurationProvider.getStructuredProperties().isSystemUpdateEnabled()) { + structuredProperties = getActiveStructuredPropertiesDefinitions(aspectDao); + } else { + structuredProperties = Set.of(); } - @Override - public List steps() { - return _steps; - } - - private List buildSteps(final List indexedServices, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + final List steps = new ArrayList<>(); + // Disable ES write mode/change refresh rate and clone indices + steps.add( + new BuildIndicesPreStep( + baseElasticSearchComponents, + indexedServices, + configurationProvider, + structuredProperties)); + // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService + steps.add(new BuildIndicesStep(indexedServices, structuredProperties)); + // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in + // pre-configure step if it already exists? 
+ steps.add( + new BuildIndicesPostStep( + baseElasticSearchComponents, indexedServices, structuredProperties)); + return steps; + } - final List steps = new ArrayList<>(); - // Disable ES write mode/change refresh rate and clone indices - steps.add(new BuildIndicesPreStep(baseElasticSearchComponents, indexedServices, configurationProvider)); - // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService - steps.add(new BuildIndicesStep(indexedServices)); - // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in pre-configure step if it already exists? - steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); - return steps; + static Set> getActiveStructuredPropertiesDefinitions( + AspectDao aspectDao) { + Set removedStructuredPropertyUrns; + try (Stream stream = + aspectDao.streamAspects(STRUCTURED_PROPERTY_ENTITY_NAME, STATUS_ASPECT_NAME)) { + removedStructuredPropertyUrns = + stream + .map( + entityAspect -> + Pair.of( + entityAspect.getUrn(), + RecordUtils.toRecordTemplate(Status.class, entityAspect.getMetadata()))) + .filter(status -> status.getSecond().isRemoved()) + .map(Pair::getFirst) + .collect(Collectors.toSet()); } + try (Stream stream = + aspectDao.streamAspects( + STRUCTURED_PROPERTY_ENTITY_NAME, STRUCTURED_PROPERTY_DEFINITION_ASPECT_NAME)) { + return stream + .map( + entityAspect -> + Pair.of( + UrnUtils.getUrn(entityAspect.getUrn()), + RecordUtils.toRecordTemplate( + StructuredPropertyDefinition.class, entityAspect.getMetadata()))) + .filter( + definition -> !removedStructuredPropertyUrns.contains(definition.getKey().toString())) + .collect(Collectors.toSet()); + } + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index 1fb9c8526ad3b0..96aea906b021e4 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -1,50 +1,70 @@ package com.linkedin.datahub.upgrade.system.elasticsearch; -import com.linkedin.datahub.upgrade.Upgrade; +import static com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices.getActiveStructuredPropertiesDefinitions; + +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.CleanIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; +import com.linkedin.metadata.entity.AspectDao; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import lombok.extern.slf4j.Slf4j; - +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j -public class CleanIndices implements Upgrade { - private final List _steps; +public class CleanIndices implements NonBlockingSystemUpgrade { + private final List _steps; - public CleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + 
public CleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider, + final AspectDao aspectDao) { - List indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); + final Set> structuredProperties; + if (configurationProvider.getStructuredProperties().isSystemUpdateEnabled()) { + structuredProperties = getActiveStructuredPropertiesDefinitions(aspectDao); + } else { + structuredProperties = Set.of(); + } + + List indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); - _steps = List.of(new CleanIndicesStep( + _steps = + List.of( + new CleanIndicesStep( baseElasticSearchComponents.getSearchClient(), configurationProvider.getElasticSearch(), - indexedServices)); - } + indexedServices, + structuredProperties)); + } - @Override - public String id() { - return "CleanIndices"; - } + @Override + public String id() { + return "CleanIndices"; + } - @Override - public List steps() { - return _steps; - } + @Override + public List steps() { + return _steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java index 2feca1f27e6258..09f65c84480279 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -9,26 +13,25 @@ import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.client.RequestOptions; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPostStep implements UpgradeStep { - private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents _esComponents; - private final List _services; + private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents esComponents; + private final List services; + private 
final Set> structuredProperties; @Override public String id() { @@ -45,8 +48,9 @@ public Function executable() { return (context) -> { try { - List indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List indexConfigs = + getAllReindexConfigs(services, structuredProperties).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); // Reset write blocking @@ -56,12 +60,26 @@ public Function executable() { request.settings(indexSettings); boolean ack = - _esComponents.getSearchClient().indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); if (ack) { - ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexConfig.name(), false); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + ack = + IndexUtils.validateWriteBlock( + esComponents.getSearchClient(), indexConfig.name(), false); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); } if (!ack) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java index 82b9428c89fb8f..983e7f0c97f38b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -8,15 +12,16 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; - +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.io.IOException; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import 
org.opensearch.OpenSearchStatusException; @@ -24,16 +29,13 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.indices.ResizeRequest; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPreStep implements UpgradeStep { - private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents _esComponents; - private final List _services; - private final ConfigurationProvider _configurationProvider; + private final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents esComponents; + private final List services; + private final ConfigurationProvider configurationProvider; + private final Set> structuredProperties; @Override public String id() { @@ -49,30 +51,42 @@ public int retryCount() { public Function executable() { return (context) -> { try { + final List reindexConfigs = + getAllReindexConfigs(services, structuredProperties); + // Get indices to update - List indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List indexConfigs = + reindexConfigs.stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); for (ReindexConfig indexConfig : indexConfigs) { - String indexName = IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); + String indexName = + IndexUtils.resolveAlias(esComponents.getSearchClient(), indexConfig.name()); boolean ack = blockWrites(indexName); if (!ack) { - log.error("Partial index settings update, some indices may still be blocking writes." + log.error( + "Partial index settings update, some indices may still be blocking writes." 
+ " Please fix the error and re-run the BuildIndices upgrade job."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } // Clone indices - if (_configurationProvider.getElasticSearch().getBuildIndices().isCloneIndices()) { + if (configurationProvider.getElasticSearch().getBuildIndices().isCloneIndices()) { String clonedName = indexConfig.name() + "_clone_" + System.currentTimeMillis(); ResizeRequest resizeRequest = new ResizeRequest(clonedName, indexName); boolean cloneAck = - _esComponents.getSearchClient().indices().clone(resizeRequest, RequestOptions.DEFAULT).isAcknowledged(); + esComponents + .getSearchClient() + .indices() + .clone(resizeRequest, RequestOptions.DEFAULT) + .isAcknowledged(); log.info("Cloned index {} into {}, Acknowledged: {}", indexName, clonedName, cloneAck); if (!cloneAck) { - log.error("Partial index settings update, cloned indices may need to be cleaned up: {}", clonedName); + log.error( + "Partial index settings update, cloned indices may need to be cleaned up: {}", + clonedName); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } @@ -85,8 +99,6 @@ public Function executable() { }; } - - private boolean blockWrites(String indexName) throws InterruptedException, IOException { UpdateSettingsRequest request = new UpdateSettingsRequest(indexName); Map indexSettings = ImmutableMap.of(INDEX_BLOCKS_WRITE_SETTING, "true"); @@ -94,13 +106,23 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc request.settings(indexSettings); boolean ack; try { - ack = _esComponents.getSearchClient().indices() - .putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + ack = + esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } catch (OpenSearchStatusException | IOException ese) { - // Cover first run case, indices won't exist so settings updates won't work nor will the rest of the preConfigure steps. + // Cover first run case, indices won't exist so settings updates won't work nor will the rest + // of the preConfigure steps. // Since no data are in there they are skippable. - // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way to extract it :( + // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way + // to extract it :( if (ese.getMessage().contains("index_not_found")) { return true; } else { @@ -109,8 +131,12 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc } if (ack) { - ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexName, true); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + ack = IndexUtils.validateWriteBlock(esComponents.getSearchClient(), indexName, true); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } return ack; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java index ef59f2998929e3..5cf370162a3125 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java @@ -1,22 +1,25 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.shared.ElasticSearchIndexed; - +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.util.List; +import java.util.Set; import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class BuildIndicesStep implements UpgradeStep { - private final List _services; + private final List services; + private final Set> structuredProperties; @Override public String id() { @@ -32,8 +35,8 @@ public int retryCount() { public Function executable() { return (context) -> { try { - for (ElasticSearchIndexed service : _services) { - service.reindexAll(); + for (ElasticSearchIndexed service : services) { + service.reindexAll(structuredProperties); } } catch (Exception e) { log.error("BuildIndicesStep failed.", e); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java index bb042bac6df955..fd5592c4ead25e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java @@ -1,54 +1,62 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; +import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.shared.ElasticSearchIndexed; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; - +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; import java.util.List; +import java.util.Set; import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; +import org.opensearch.client.RestHighLevelClient; @Slf4j public class CleanIndicesStep implements UpgradeStep { - private final RestHighLevelClient searchClient; - private final ElasticSearchConfiguration esConfig; - private final List indexedServices; - - public CleanIndicesStep(final RestHighLevelClient searchClient, final ElasticSearchConfiguration esConfig, - final List indexedServices) { - this.searchClient = searchClient; - this.esConfig = esConfig; - this.indexedServices = indexedServices; - } - - @Override - public String id() { - return "CleanUpIndicesStep"; - } - - @Override - public int retryCount() 
{ - return 0; - } - - @Override - public Function executable() { - return (context) -> { - try { - IndexUtils.getAllReindexConfigs(indexedServices) - .forEach(reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); - } catch (Exception e) { - log.error("CleanUpIndicesStep failed.", e); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); - } - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } + private final RestHighLevelClient searchClient; + private final ElasticSearchConfiguration esConfig; + private final List indexedServices; + private final Set> structuredProperties; + + public CleanIndicesStep( + final RestHighLevelClient searchClient, + final ElasticSearchConfiguration esConfig, + final List indexedServices, + final Set> structuredProperties) { + this.searchClient = searchClient; + this.esConfig = esConfig; + this.indexedServices = indexedServices; + this.structuredProperties = structuredProperties; + } + + @Override + public String id() { + return "CleanUpIndicesStep"; + } + + @Override + public int retryCount() { + return 0; + } + + @Override + public Function executable() { + return (context) -> { + try { + IndexUtils.getAllReindexConfigs(indexedServices, structuredProperties) + .forEach( + reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); + } catch (Exception e) { + log.error("CleanUpIndicesStep failed.", e); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index 1e568f1e9a9fec..d2b5965a3109ce 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -6,12 +6,10 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; +import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.function.Function; - - @RequiredArgsConstructor @Slf4j public class DataHubStartupStep implements UpgradeStep { @@ -32,10 +30,10 @@ public int retryCount() { public Function executable() { return (context) -> { try { - DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent() - .setVersion(_version); + DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = + new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); - log.info("Initiating startup for version: {}", _version); + log.info("System Update finished for version: {}", _version); } catch (Exception e) { log.error("DataHubStartupStep failed.", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java index d9788448444eda..99d72776ff788b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java @@ -1,7 +1,15 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.util; +import com.linkedin.common.urn.Urn; import 
com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import com.linkedin.structured.StructuredPropertyDefinition; +import com.linkedin.util.Pair; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -11,27 +19,25 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - - @Slf4j public class IndexUtils { public static final String INDEX_BLOCKS_WRITE_SETTING = "index.blocks.write"; public static final int INDEX_BLOCKS_WRITE_RETRY = 4; public static final int INDEX_BLOCKS_WRITE_WAIT_SECONDS = 10; - private IndexUtils() { } + + private IndexUtils() {} private static List _reindexConfigs = new ArrayList<>(); - public static List getAllReindexConfigs(List elasticSearchIndexedList) throws IOException { + public static List getAllReindexConfigs( + List elasticSearchIndexedList, + Collection> structuredProperties) + throws IOException { // Avoid locking & reprocessing List reindexConfigs = new ArrayList<>(_reindexConfigs); if (reindexConfigs.isEmpty()) { for (ElasticSearchIndexed elasticSearchIndexed : elasticSearchIndexedList) { - reindexConfigs.addAll(elasticSearchIndexed.buildReindexConfigs()); + reindexConfigs.addAll(elasticSearchIndexed.buildReindexConfigs(structuredProperties)); } _reindexConfigs = new ArrayList<>(reindexConfigs); } @@ -39,19 +45,24 @@ public static List getAllReindexConfigs(List 0) { - GetSettingsResponse response = esClient.indices().getSettings(request, RequestOptions.DEFAULT); - if (response.getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING).equals(String.valueOf(expectedState))) 
{ + GetSettingsResponse response = + esClient.indices().getSettings(request, RequestOptions.DEFAULT); + if (response + .getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING) + .equals(String.valueOf(expectedState))) { return true; } count = count - 1; @@ -64,20 +75,20 @@ public static boolean validateWriteBlock(RestHighLevelClient esClient, String in return false; } - public static String resolveAlias(RestHighLevelClient esClient, String indexName) throws IOException { + public static String resolveAlias(RestHighLevelClient esClient, String indexName) + throws IOException { String finalIndexName = indexName; - GetAliasesResponse aliasResponse = esClient.indices() - .getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); + GetAliasesResponse aliasResponse = + esClient.indices().getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); if (!aliasResponse.getAliases().isEmpty()) { Set indices = aliasResponse.getAliases().keySet(); if (indices.size() != 1) { throw new NotImplementedException( - String.format("Clone not supported for %s indices in alias %s. Indices: %s", - indices.size(), - indexName, - String.join(",", indices))); + String.format( + "Clone not supported for %s indices in alias %s. 
Indices: %s", + indices.size(), indexName, String.join(",", indices))); } finalIndexName = indices.stream().findFirst().get(); log.info("Alias {} resolved to index {}", indexName, finalIndexName); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java deleted file mode 100644 index e213c0b2fd4dec..00000000000000 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; - -import com.google.common.collect.ImmutableList; -import com.linkedin.datahub.upgrade.Upgrade; -import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.search.SearchService; -import java.util.List; - - -public class BackfillBrowsePathsV2 implements Upgrade { - - private final List _steps; - - public BackfillBrowsePathsV2(EntityService entityService, SearchService searchService) { - _steps = ImmutableList.of(new BackfillBrowsePathsV2Step(entityService, searchService)); - } - - @Override - public String id() { - return "BackfillBrowsePathsV2"; - } - - @Override - public List steps() { - return _steps; - } -} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java deleted file mode 100644 index 7547186ccfb230..00000000000000 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ /dev/null @@ -1,167 +0,0 @@ -package com.linkedin.datahub.upgrade.system.entity.steps; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import 
com.linkedin.common.AuditStamp; -import com.linkedin.common.BrowsePathsV2; -import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.datahub.upgrade.UpgradeContext; -import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.UpgradeStepResult; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.events.metadata.ChangeType; -import com.linkedin.metadata.Constants; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.query.filter.Condition; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; -import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.search.ScrollResult; -import com.linkedin.metadata.search.SearchEntity; -import com.linkedin.metadata.search.SearchService; -import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.MetadataChangeProposal; -import com.linkedin.mxe.SystemMetadata; -import java.util.function.Function; -import lombok.extern.slf4j.Slf4j; - -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - - -@Slf4j -public class BackfillBrowsePathsV2Step implements UpgradeStep { - - public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - - private static final Set ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); - private static final Integer BATCH_SIZE = 5000; - - private final EntityService _entityService; - private final 
SearchService _searchService; - - public BackfillBrowsePathsV2Step(EntityService entityService, SearchService searchService) { - _searchService = searchService; - _entityService = entityService; - } - - @Override - public Function executable() { - return (context) -> { - final AuditStamp auditStamp = - new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); - - String scrollId = null; - for (String entityType : ENTITY_TYPES_TO_MIGRATE) { - int migratedCount = 0; - do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", migratedCount, - migratedCount + BATCH_SIZE, entityType)); - scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); - migratedCount += BATCH_SIZE; - } while (scrollId != null); - } - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } - - private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, String scrollId) { - - // Condition: has `browsePaths` AND does NOT have `browsePathV2` - Criterion missingBrowsePathV2 = new Criterion(); - missingBrowsePathV2.setCondition(Condition.IS_NULL); - missingBrowsePathV2.setField("browsePathV2"); - // Excludes entities without browsePaths - Criterion hasBrowsePathV1 = new Criterion(); - hasBrowsePathV1.setCondition(Condition.EXISTS); - hasBrowsePathV1.setField("browsePaths"); - - CriterionArray criterionArray = new CriterionArray(); - criterionArray.add(missingBrowsePathV2); - criterionArray.add(hasBrowsePathV1); - - ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); - conjunctiveCriterion.setAnd(criterionArray); - - ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); - conjunctiveCriterionArray.add(conjunctiveCriterion); - - Filter filter = new Filter(); - filter.setOr(conjunctiveCriterionArray); - - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", 
- filter, - null, - scrollId, - "5m", - BATCH_SIZE, - null - ); - if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { - return null; - } - - for (SearchEntity searchEntity : scrollResult.getEntities()) { - try { - ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); - } catch (Exception e) { - // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); - } - } - - return scrollResult.getScrollId(); - } - - private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exception { - BrowsePathsV2 browsePathsV2 = _entityService.buildDefaultBrowsePathV2(urn, true); - log.debug(String.format("Adding browse path v2 for urn %s with value %s", urn, browsePathsV2)); - MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(urn); - proposal.setEntityType(urn.getEntityType()); - proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); - proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); - proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - false - ); - } - - @Override - public String id() { - return "BackfillBrowsePathsV2Step"; - } - - /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. 
- */ - @Override - public boolean isOptional() { - return true; - } - - @Override - public boolean skip(UpgradeContext context) { - return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); - } -} - diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java new file mode 100644 index 00000000000000..ca568e91928951 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFields.java @@ -0,0 +1,40 @@ +package com.linkedin.datahub.upgrade.system.policyfields; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.search.SearchService; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; + +public class BackfillPolicyFields implements NonBlockingSystemUpgrade { + private final List _steps; + + public BackfillPolicyFields( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean enabled, + boolean reprocessEnabled, + Integer batchSize) { + if (enabled) { + _steps = + ImmutableList.of( + new BackfillPolicyFieldsStep( + opContext, entityService, searchService, reprocessEnabled, batchSize)); + } else { + _steps = ImmutableList.of(); + } + } + + @Override + public String id() { + return "BackfillPolicyFields"; + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java new file mode 100644 index 00000000000000..c65a45aefc357f --- /dev/null 
+++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/policyfields/BackfillPolicyFieldsStep.java @@ -0,0 +1,258 @@ +package com.linkedin.datahub.upgrade.system.policyfields; + +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.SystemMetadataUtils.createDefaultSystemMetadata; + +import com.google.common.collect.ImmutableList; +import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.data.DataMap; +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; +import com.linkedin.entity.EntityResponse; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.boot.BootstrapStep; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.query.filter.Condition; +import com.linkedin.metadata.query.filter.ConjunctiveCriterion; +import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; +import com.linkedin.metadata.query.filter.Criterion; +import com.linkedin.metadata.query.filter.CriterionArray; +import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchService; +import com.linkedin.policy.DataHubPolicyInfo; +import io.datahubproject.metadata.context.OperationContext; +import java.net.URISyntaxException; +import java.util.Collections; +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.function.Function; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; + +/** + * This bootstrap 
step is responsible for upgrading DataHub policy documents with new searchable + * fields in ES + */ +@Slf4j +public class BackfillPolicyFieldsStep implements UpgradeStep { + private static final String UPGRADE_ID = "BackfillPolicyFieldsStep_V2"; + private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); + + private final OperationContext opContext; + private final boolean reprocessEnabled; + private final Integer batchSize; + private final EntityService entityService; + private final SearchService _searchService; + + public BackfillPolicyFieldsStep( + OperationContext opContext, + EntityService entityService, + SearchService searchService, + boolean reprocessEnabled, + Integer batchSize) { + this.opContext = opContext; + this.entityService = entityService; + this._searchService = searchService; + this.reprocessEnabled = reprocessEnabled; + this.batchSize = batchSize; + } + + @Override + public String id() { + return UPGRADE_ID; + } + + @Override + public Function executable() { + return (context) -> { + final AuditStamp auditStamp = + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); + + String scrollId = null; + int migratedCount = 0; + do { + log.info("Upgrading batch of policies {}-{}", migratedCount, migratedCount + batchSize); + scrollId = backfillPolicies(context, auditStamp, scrollId); + migratedCount += batchSize; + } while (scrollId != null); + + BootstrapStep.setUpgradeResult(context.opContext(), UPGRADE_ID_URN, entityService); + + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } + + /** + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. + */ + @Override + public boolean isOptional() { + return true; + } + + /** + * Returns whether the upgrade should be skipped. 
Uses previous run history or the environment + * variables REPROCESS_DEFAULT_POLICY_FIELDS & BACKFILL_BROWSE_PATHS_V2 to determine whether to + * skip. + */ + @Override + public boolean skip(UpgradeContext context) { + + if (reprocessEnabled) { + return false; + } + + boolean previouslyRun = + entityService.exists( + context.opContext(), UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); + if (previouslyRun) { + log.info("{} was already run. Skipping.", id()); + } + return previouslyRun; + } + + private String backfillPolicies(UpgradeContext context, AuditStamp auditStamp, String scrollId) { + + final Filter filter = backfillPolicyFieldFilter(); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + opContext.withSearchFlags( + flags -> + flags + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)), + ImmutableList.of(Constants.POLICY_ENTITY_NAME), + "*", + filter, + null, + scrollId, + null, + batchSize); + + if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().isEmpty()) { + return null; + } + + List> futures = new LinkedList<>(); + for (SearchEntity searchEntity : scrollResult.getEntities()) { + try { + ingestPolicyFields(context, searchEntity.getEntity(), auditStamp).ifPresent(futures::add); + } catch (Exception e) { + // don't stop the whole step because of one bad urn or one bad ingestion + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); + } + } + + futures.forEach( + f -> { + try { + f.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e); + } + }); + + return scrollResult.getScrollId(); + } + + private Filter backfillPolicyFieldFilter() { + // Condition: Does not have at least 1 of: `privileges`, `editable`, `state`, `type`, `users`, + // `groups`, `allUsers` + // `allGroups` or `roles` + ConjunctiveCriterionArray conjunctiveCriterionArray = new 
ConjunctiveCriterionArray(); + + conjunctiveCriterionArray.add(getCriterionForMissingField("privilege")); + conjunctiveCriterionArray.add(getCriterionForMissingField("editable")); + conjunctiveCriterionArray.add(getCriterionForMissingField("state")); + conjunctiveCriterionArray.add(getCriterionForMissingField("type")); + conjunctiveCriterionArray.add(getCriterionForMissingField("users")); + conjunctiveCriterionArray.add(getCriterionForMissingField("groups")); + conjunctiveCriterionArray.add(getCriterionForMissingField("roles")); + conjunctiveCriterionArray.add(getCriterionForMissingField("allUsers")); + conjunctiveCriterionArray.add(getCriterionForMissingField("allGroups")); + + Filter filter = new Filter(); + filter.setOr(conjunctiveCriterionArray); + return filter; + } + + private Optional> ingestPolicyFields( + UpgradeContext context, Urn urn, AuditStamp auditStamp) { + EntityResponse entityResponse = null; + try { + entityResponse = + entityService.getEntityV2( + context.opContext(), + urn.getEntityType(), + urn, + Collections.singleton(DATAHUB_POLICY_INFO_ASPECT_NAME)); + } catch (URISyntaxException e) { + log.error( + String.format( + "Error getting DataHub Policy Info for entity with urn %s while restating policy information", + urn), + e); + } + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DATAHUB_POLICY_INFO_ASPECT_NAME)) { + final DataMap dataMap = + entityResponse.getAspects().get(DATAHUB_POLICY_INFO_ASPECT_NAME).getValue().data(); + final DataHubPolicyInfo infoAspect = new DataHubPolicyInfo(dataMap); + + log.debug("Restating policy information for urn {} with value {}", urn, infoAspect); + return Optional.of( + entityService + .alwaysProduceMCLAsync( + context.opContext(), + urn, + urn.getEntityType(), + DATAHUB_POLICY_INFO_ASPECT_NAME, + opContext + .getEntityRegistry() + .getAspectSpecs() + .get(DATAHUB_POLICY_INFO_ASPECT_NAME), + null, + infoAspect, + null, + createDefaultSystemMetadata(), + auditStamp, + 
ChangeType.RESTATE) + .getFirst()); + } + + return Optional.empty(); + } + + @NotNull + private static ConjunctiveCriterion getCriterionForMissingField(String field) { + final Criterion missingPrivilegesField = new Criterion(); + missingPrivilegesField.setCondition(Condition.IS_NULL); + missingPrivilegesField.setField(field); + + final CriterionArray criterionArray = new CriterionArray(); + criterionArray.add(missingPrivilegesField); + final ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); + conjunctiveCriterion.setAnd(criterionArray); + return conjunctiveCriterion; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java new file mode 100644 index 00000000000000..fc0b44f57ab494 --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLL.java @@ -0,0 +1,49 @@ +package com.linkedin.datahub.upgrade.system.vianodes; + +import com.google.common.collect.ImmutableList; +import com.linkedin.datahub.upgrade.UpgradeStep; +import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.entity.EntityService; +import io.datahubproject.metadata.context.OperationContext; +import java.util.List; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; + +/** + * A job that reindexes all datajob inputoutput aspects as part of the via node upgrade. This is + * required to index column-level lineage correctly using via nodes. 
+ */ +@Slf4j +public class ReindexDataJobViaNodesCLL implements NonBlockingSystemUpgrade { + + private final List _steps; + + public ReindexDataJobViaNodesCLL( + @Nonnull OperationContext opContext, + EntityService entityService, + AspectDao aspectDao, + boolean enabled, + Integer batchSize, + Integer batchDelayMs, + Integer limit) { + if (enabled) { + _steps = + ImmutableList.of( + new ReindexDataJobViaNodesCLLStep( + opContext, entityService, aspectDao, batchSize, batchDelayMs, limit)); + } else { + _steps = ImmutableList.of(); + } + } + + @Override + public String id() { + return this.getClass().getName(); + } + + @Override + public List steps() { + return _steps; + } +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java new file mode 100644 index 00000000000000..cf580670ee3a9a --- /dev/null +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/vianodes/ReindexDataJobViaNodesCLLStep.java @@ -0,0 +1,57 @@ +package com.linkedin.datahub.upgrade.system.vianodes; + +import static com.linkedin.metadata.Constants.*; + +import com.linkedin.datahub.upgrade.UpgradeContext; +import com.linkedin.datahub.upgrade.system.AbstractMCLStep; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.entity.EntityService; +import io.datahubproject.metadata.context.OperationContext; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.Nullable; + +@Slf4j +public class ReindexDataJobViaNodesCLLStep extends AbstractMCLStep { + + public ReindexDataJobViaNodesCLLStep( + OperationContext opContext, + EntityService entityService, + AspectDao aspectDao, + Integer batchSize, + Integer batchDelayMs, + Integer limit) { + super(opContext, entityService, aspectDao, batchSize, batchDelayMs, limit); + } + + @Override + public 
String id() { + return "via-node-cll-reindex-datajob-v3"; + } + + @Nonnull + @Override + protected String getAspectName() { + return DATA_JOB_INPUT_OUTPUT_ASPECT_NAME; + } + + @Nullable + @Override + protected String getUrnLike() { + return "urn:li:" + DATA_JOB_ENTITY_NAME + ":%"; + } + + @Override + /** + * Returns whether the upgrade should be skipped. Uses previous run history or the environment + * variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT to determine whether to skip. + */ + public boolean skip(UpgradeContext context) { + boolean envFlagRecommendsSkip = + Boolean.parseBoolean(System.getenv("SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT")); + if (envFlagRecommendsSkip) { + log.info("Environment variable SKIP_REINDEX_DATA_JOB_INPUT_OUTPUT is set to true. Skipping."); + } + return (super.skip(context) || envFlagRecommendsSkip); + } +} diff --git a/datahub-upgrade/src/main/resources/application.properties b/datahub-upgrade/src/main/resources/application.properties new file mode 100644 index 00000000000000..b884c92f74bd48 --- /dev/null +++ b/datahub-upgrade/src/main/resources/application.properties @@ -0,0 +1,5 @@ +management.health.elasticsearch.enabled=false +management.health.neo4j.enabled=false +ingestion.enabled=false +spring.main.allow-bean-definition-overriding=true +entityClient.impl=restli diff --git a/datahub-upgrade/src/main/resources/logback.xml b/datahub-upgrade/src/main/resources/logback.xml index b934e4aa16fc08..3803dc67468466 100644 --- a/datahub-upgrade/src/main/resources/logback.xml +++ b/datahub-upgrade/src/main/resources/logback.xml @@ -10,6 +10,7 @@ scanned from multiple locations + [ignore_throttled] parameter is deprecated because frozen indices have been deprecated @@ -32,6 +33,7 @@ scanned from multiple locations + [ignore_throttled] parameter is deprecated because frozen indices have been deprecated diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java 
b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index db697a40d0c6c9..8c9b72b0d88e5e 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -1,70 +1,123 @@ package com.linkedin.datahub.upgrade; +import static com.linkedin.metadata.EventUtils.RENAMED_MCL_AVRO_SCHEMA; +import static com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer.topicToSubjectName; +import static org.mockito.Mockito.mock; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; + import com.linkedin.datahub.upgrade.system.SystemUpdate; +import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; +import com.linkedin.metadata.boot.kafka.MockSystemUpdateDeserializer; +import com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer; +import com.linkedin.metadata.dao.producer.KafkaEventProducer; +import com.linkedin.metadata.entity.EntityServiceImpl; +import com.linkedin.mxe.Topics; +import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; +import io.datahubproject.metadata.context.OperationContext; +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; +@ActiveProfiles("test") +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + 
"kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME, + "METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=" + Topics.METADATA_CHANGE_LOG_VERSIONED, + }, + args = {"-u", "SystemUpdate"}) +public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { -import java.util.List; -import java.util.Map; -import java.util.Optional; + @Autowired + @Named("systemUpdate") + private SystemUpdate systemUpdate; -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; + @Autowired + @Named("kafkaEventProducer") + private KafkaEventProducer kafkaEventProducer; -@ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, - properties = { - "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) -public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { + @Autowired + @Named("duheKafkaEventProducer") + private KafkaEventProducer duheKafkaEventProducer; + + @Autowired private EntityServiceImpl entityService; + + @Autowired + @Named("schemaRegistryConfig") + private SchemaRegistryConfig schemaRegistryConfig; + + @Test + public void testSystemUpdateInit() { + assertNotNull(systemUpdate); + } + + @Test + public void testSystemUpdateKafkaProducerOverride() throws RestClientException, IOException { + assertEquals(schemaRegistryConfig.getDeserializer(), MockSystemUpdateDeserializer.class); + assertEquals(schemaRegistryConfig.getSerializer(), MockSystemUpdateSerializer.class); + assertEquals(kafkaEventProducer, duheKafkaEventProducer); + assertEquals(entityService.getProducer(), duheKafkaEventProducer); + + MockSystemUpdateSerializer serializer = new MockSystemUpdateSerializer(); + serializer.configure(schemaRegistryConfig.getProperties(), false); + SchemaRegistryClient registry = serializer.getSchemaRegistryClient(); 
+ assertEquals( + registry.getId( + topicToSubjectName(Topics.METADATA_CHANGE_LOG_VERSIONED), RENAMED_MCL_AVRO_SCHEMA), + 2); + } + + @Test + public void testSystemUpdateSend() { + UpgradeStepResult.Result result = + systemUpdate.steps().stream() + .filter(s -> s.id().equals("DataHubStartupStep")) + .findFirst() + .get() + .executable() + .apply( + new UpgradeContext() { + @Override + public Upgrade upgrade() { + return null; + } + + @Override + public List stepResults() { + return null; + } + + @Override + public UpgradeReport report() { + return null; + } + + @Override + public List args() { + return null; + } - @Autowired - @Named("systemUpdate") - private SystemUpdate systemUpdate; - - @Test - public void testSystemUpdateInit() { - assertNotNull(systemUpdate); - } - - @Test - public void testSystemUpdateSend() { - UpgradeStepResult.Result result = systemUpdate.steps().stream() - .filter(s -> s.id().equals("DataHubStartupStep")) - .findFirst().get() - .executable().apply(new UpgradeContext() { - @Override - public Upgrade upgrade() { - return null; - } - - @Override - public List stepResults() { - return null; - } - - @Override - public UpgradeReport report() { - return null; - } - - @Override - public List args() { - return null; - } - - @Override - public Map> parsedArgs() { - return null; - } - }).result(); - assertEquals("SUCCEEDED", result.toString()); - } + @Override + public Map> parsedArgs() { + return null; + } + @Override + public OperationContext opContext() { + return mock(OperationContext.class); + } + }) + .result(); + assertEquals("SUCCEEDED", result.toString()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNonBlockingTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNonBlockingTest.java new file mode 100644 index 00000000000000..154b1de71f46cd --- /dev/null +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNonBlockingTest.java @@ -0,0 
+1,100 @@ +package com.linkedin.datahub.upgrade; + +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.testng.Assert.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager; +import com.linkedin.datahub.upgrade.system.SystemUpdateNonBlocking; +import com.linkedin.datahub.upgrade.system.vianodes.ReindexDataJobViaNodesCLL; +import com.linkedin.gms.factory.kafka.schemaregistry.SchemaRegistryConfig; +import com.linkedin.metadata.boot.kafka.MockSystemUpdateDeserializer; +import com.linkedin.metadata.boot.kafka.MockSystemUpdateSerializer; +import com.linkedin.metadata.dao.producer.KafkaEventProducer; +import com.linkedin.metadata.entity.AspectDao; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.EntityServiceImpl; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.mxe.Topics; +import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.test.metadata.context.TestOperationContexts; +import java.util.List; +import javax.inject.Named; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; +import org.testng.annotations.Test; + +@ActiveProfiles("test") +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + "BOOTSTRAP_SYSTEM_UPDATE_DATA_JOB_NODE_CLL_ENABLED=true", + "kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=" + Topics.DATAHUB_UPGRADE_HISTORY_TOPIC_NAME, + "METADATA_CHANGE_LOG_VERSIONED_TOPIC_NAME=" + Topics.METADATA_CHANGE_LOG_VERSIONED, + }, + 
args = {"-u", "SystemUpdateNonBlocking"}) +public class DatahubUpgradeNonBlockingTest extends AbstractTestNGSpringContextTests { + + @Autowired(required = false) + @Named("systemUpdateNonBlocking") + private SystemUpdateNonBlocking systemUpdateNonBlocking; + + @Autowired + @Named("schemaRegistryConfig") + private SchemaRegistryConfig schemaRegistryConfig; + + @Autowired + @Named("duheKafkaEventProducer") + private KafkaEventProducer duheKafkaEventProducer; + + @Autowired + @Named("kafkaEventProducer") + private KafkaEventProducer kafkaEventProducer; + + @Autowired private EntityServiceImpl entityService; + + @Autowired private OperationContext opContext; + + @Test + public void testSystemUpdateNonBlockingInit() { + assertNotNull(systemUpdateNonBlocking); + + // Expected system update configuration and producer + assertEquals(schemaRegistryConfig.getDeserializer(), MockSystemUpdateDeserializer.class); + assertEquals(schemaRegistryConfig.getSerializer(), MockSystemUpdateSerializer.class); + assertEquals(duheKafkaEventProducer, kafkaEventProducer); + assertEquals(entityService.getProducer(), duheKafkaEventProducer); + } + + @Test + public void testReindexDataJobViaNodesCLLPaging() { + EntityService mockService = mock(EntityService.class); + + AspectDao mockAspectDao = mock(AspectDao.class); + + ReindexDataJobViaNodesCLL cllUpgrade = + new ReindexDataJobViaNodesCLL(opContext, mockService, mockAspectDao, true, 10, 0, 0); + SystemUpdateNonBlocking upgrade = + new SystemUpdateNonBlocking(List.of(), List.of(cllUpgrade), null); + DefaultUpgradeManager manager = new DefaultUpgradeManager(); + manager.register(upgrade); + manager.execute( + TestOperationContexts.systemContextNoSearchAuthorization(), + "SystemUpdateNonBlocking", + List.of()); + verify(mockAspectDao, times(1)) + .streamAspectBatches( + eq( + new RestoreIndicesArgs() + .batchSize(10) + .limit(0) + .aspectName("dataJobInputOutput") + .urnLike("urn:li:dataJob:%"))); + } +} diff --git 
a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 74cde414adc2f1..dc4c3073ee351c 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -1,49 +1,49 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import static org.testng.AssertJUnit.*; + import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.BlockingSystemUpgrade; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import static org.testng.AssertJUnit.*; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) +@SpringBootTest( + args = {"-u", "SystemUpdate"}, + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("restoreIndices") - private RestoreIndices restoreIndices; - - @Autowired - @Named("buildIndices") - private BuildIndices buildIndices; - - @Autowired - private ESIndexBuilder esIndexBuilder; - - @Test - public void testRestoreIndicesInit() { - /* - This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean - */ - assertTrue(restoreIndices.steps().size() >= 3); - } - - 
@Test - public void testBuildIndicesInit() { - assertEquals("BuildIndices", buildIndices.id()); - assertTrue(buildIndices.steps().size() >= 3); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); - assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); - assertFalse(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); - } - + @Autowired + @Named("restoreIndices") + private RestoreIndices restoreIndices; + + @Autowired + @Named("buildIndices") + private BlockingSystemUpgrade buildIndices; + + @Autowired private ESIndexBuilder esIndexBuilder; + + @Test + public void testRestoreIndicesInit() { + /* + This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean + */ + assertTrue(restoreIndices.steps().size() >= 3); + } + + @Test + public void testBuildIndicesInit() { + assertEquals("BuildIndices", buildIndices.id()); + assertTrue(buildIndices.steps().size() >= 3); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); + assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); + assertFalse( + esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index 6cc853b2c7c4d5..81d883d8ce36b7 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -1,42 +1,40 @@ package 
com.linkedin.datahub.upgrade; import com.linkedin.gms.factory.auth.SystemAuthenticationFactory; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.registry.SchemaRegistryService; +import com.linkedin.metadata.registry.SchemaRegistryServiceImpl; import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders; +import com.linkedin.mxe.TopicConventionImpl; import io.ebean.Database; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; @TestConfiguration @Import(value = {SystemAuthenticationFactory.class}) public class UpgradeCliApplicationTestConfiguration { - @MockBean - private UpgradeCli upgradeCli; + @MockBean private UpgradeCli upgradeCli; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityService _entityService; + @MockBean private SearchService searchService; - @MockBean - private SearchService searchService; + @MockBean private GraphService graphService; - @MockBean - private GraphService graphService; + @MockBean private EntityRegistry entityRegistry; - @MockBean - private EntityRegistry entityRegistry; + @MockBean ConfigEntityRegistry configEntityRegistry; - @MockBean - ConfigEntityRegistry configEntityRegistry; + @MockBean public EntityIndexBuilders entityIndexBuilders; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @Bean + public SchemaRegistryService schemaRegistryService() { + return new SchemaRegistryServiceImpl(new TopicConventionImpl()); + } } diff --git a/datahub-web-react/.env b/datahub-web-react/.env index 
d503159ecaf974..7c02340752104b 100644 --- a/datahub-web-react/.env +++ b/datahub-web-react/.env @@ -1,4 +1,3 @@ -PUBLIC_URL=/assets REACT_APP_THEME_CONFIG=theme_light.config.json SKIP_PREFLIGHT_CHECK=true -BUILD_PATH=build/yarn \ No newline at end of file +REACT_APP_PROXY_TARGET=http://localhost:9002 diff --git a/datahub-web-react/.eslintrc.js b/datahub-web-react/.eslintrc.js index 3cdc747d100b5f..5627283af1af1c 100644 --- a/datahub-web-react/.eslintrc.js +++ b/datahub-web-react/.eslintrc.js @@ -1,15 +1,14 @@ module.exports = { parser: '@typescript-eslint/parser', // Specifies the ESLint parser extends: [ - 'react-app', - 'plugin:react/recommended', // Uses the recommended rules from @eslint-plugin-react - 'plugin:@typescript-eslint/recommended', // Uses the recommended rules from @typescript-eslint/eslint-plugin - 'plugin:jest/recommended', + 'airbnb', 'airbnb-typescript', 'airbnb/hooks', + 'plugin:@typescript-eslint/recommended', + 'plugin:vitest/recommended', 'prettier', - 'plugin:prettier/recommended', ], + plugins: ['@typescript-eslint', 'react-refresh'], parserOptions: { ecmaVersion: 2020, // Allows for the parsing of modern ECMAScript features sourceType: 'module', // Allows for the use of imports @@ -19,18 +18,27 @@ module.exports = { project: './tsconfig.json', }, rules: { - eqeqeq: ['error', 'always'], - 'react/destructuring-assignment': 'off', - 'no-console': 'off', - 'no-debugger': 'warn', - 'require-await': 'warn', + '@typescript-eslint/no-explicit-any': 'off', + 'arrow-body-style': 'off', + 'class-methods-use-this': 'off', + 'import/no-extraneous-dependencies': 'off', 'import/prefer-default-export': 'off', // TODO: remove this lint rule - 'import/extensions': 'off', - 'react/jsx-props-no-spreading': 'off', + 'no-console': 'off', 'no-plusplus': 'off', 'no-prototype-builtins': 'off', - 'react/require-default-props': 'off', + 'no-restricted-exports': ['off', { restrictedNamedExports: ['default', 'then'] }], 'no-underscore-dangle': 'off', + 
'no-unsafe-optional-chaining': 'off', + 'prefer-exponentiation-operator': 'off', + 'prefer-regex-literals': 'off', + 'react/destructuring-assignment': 'off', + 'react/function-component-definition': 'off', + 'react/jsx-no-bind': 'off', + 'react/jsx-no-constructed-context-values': 'off', + 'react/jsx-no-useless-fragment': 'off', + 'react/jsx-props-no-spreading': 'off', + 'react/no-unstable-nested-components': 'off', + 'react/require-default-props': 'off', '@typescript-eslint/no-unused-vars': [ 'error', { @@ -38,10 +46,9 @@ module.exports = { argsIgnorePattern: '^_', }, ], - '@typescript-eslint/no-empty-interface': 'off', - "@typescript-eslint/explicit-module-boundary-types": "off", - "@typescript-eslint/no-explicit-any": 'off', - "import/no-extraneous-dependencies": 'off' + 'vitest/prefer-to-be': 'off', + '@typescript-eslint/no-use-before-define': ['error', { functions: false, classes: false }], + 'react-refresh/only-export-components': ['warn', { 'allowConstantExport': true }], }, settings: { react: { diff --git a/datahub-web-react/README.md b/datahub-web-react/README.md index 8bf592b11a0aec..560f5315b2c71f 100644 --- a/datahub-web-react/README.md +++ b/datahub-web-react/README.md @@ -51,6 +51,14 @@ need to be deployed, still at `http://localhost:9002`, to service GraphQL API re Optionally you could also start the app with the mock server without running the docker containers by executing `yarn start:mock`. See [here](src/graphql-mock/fixtures/searchResult/userSearchResult.ts#L6) for available login users. +### Testing your customizations + +There are two options to test your customizations: +* **Option 1**: Initialize the docker containers with the `quickstart.sh` script (or any custom docker-compose file) and then run `yarn start` in this directory. This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at `http://localhost:9002` to fetch real data.
+* **Option 2**: Change the environment variable `REACT_APP_PROXY_TARGET` in the `.env` file to point to your `datahub-frontend` server (ex: https://my_datahub_host.com) and then run `yarn start` in this directory. This will start a forwarding server at `localhost:3000` that will use the `datahub-frontend` server at some domain to fetch real data. + +Option 2 is useful if you want to test your React customizations without having to run the whole DataHub stack locally. However, if you changed other components of the DataHub stack, you will need to run the whole stack locally (building the docker images) and use option 1. + ### Functional testing In order to start a server and run frontend unit tests using react-testing-framework, run: diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index ae96ed130c1d15..05af6871715ced 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -1,8 +1,8 @@ plugins { id 'java' + id 'distribution' + id 'com.github.node-gradle.node' } -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' node { @@ -16,10 +16,10 @@ node { } // Version of node to use. - version = '16.8.0' + version = '21.2.0' // Version of Yarn to use. - yarnVersion = '1.22.0' + yarnVersion = '1.22.21' // Base URL for fetching node distributions (set nodeDistBaseUrl if you have a mirror). if (project.hasProperty('nodeDistBaseUrl')) { @@ -35,7 +35,7 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } @@ -44,10 +44,33 @@ node { */ task yarnInstall(type: YarnTask) { args = ['install'] + + // The node_modules directory can contain built artifacts, so + // it's not really safe to cache it.
+ outputs.cacheIf { false } + + inputs.files( + file('yarn.lock'), + file('package.json'), + ) + outputs.dir('node_modules') } task yarnGenerate(type: YarnTask, dependsOn: yarnInstall) { args = ['run', 'generate'] + + outputs.cacheIf { true } + + inputs.files( + yarnInstall.inputs.files, + file('codegen.yml'), + project.fileTree(dir: "../datahub-graphql-core/src/main/resources/", include: "*.graphql"), + project.fileTree(dir: "src", include: "**/*.graphql"), + ) + + outputs.files( + project.fileTree(dir: "src", include: "**/*.generated.ts"), + ) } task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -55,7 +78,8 @@ task yarnServe(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { } task yarnTest(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - args = ['run', 'test', '--watchAll', 'false'] + // Explicitly runs in non-watch mode. + args = ['run', 'test', 'run'] } task yarnLint(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { @@ -68,13 +92,24 @@ task yarnLintFix(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { args = ['run', 'lint-fix'] } -task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnTest, yarnLint]) { +task yarnBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { + environment = [NODE_OPTIONS: "--max-old-space-size=3072 --openssl-legacy-provider"] args = ['run', 'build'] -} -task yarnQuickBuild(type: YarnTask, dependsOn: [yarnInstall, yarnGenerate]) { - environment = [NODE_OPTIONS: "--max-old-space-size=3072"] - args = ['run', 'build'] + outputs.cacheIf { true } + inputs.files( + file('index.html'), + project.fileTree(dir: "src"), + project.fileTree(dir: "public"), + + yarnInstall.inputs.files, + yarnGenerate.outputs.files, + + file('.env'), + file('vite.config.ts'), + file('tsconfig.json'), + ) + outputs.dir('dist') } task cleanExtraDirs { @@ -82,9 +117,7 @@ task cleanExtraDirs { delete 'dist' delete 'tmp' delete 'just' - delete 'src/types.generated.ts' - delete 
fileTree('../datahub-frontend/public') - delete fileTree(dir: 'src/graphql', include: '*.generated.ts') + delete fileTree(dir: 'src', include: '*.generated.ts') } clean.finalizedBy(cleanExtraDirs) @@ -93,24 +126,16 @@ configurations { } distZip { - dependsOn yarnQuickBuild - baseName 'datahub-web-react' + dependsOn yarnBuild + archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } -task copyAssets(dependsOn: distZip) { - doLast { - copy { - from zipTree(distZip.outputs.files.first()) - into "../datahub-frontend/public" - } - } -} - jar { - dependsOn distZip, copyAssets + dependsOn distZip into('public') { from zipTree(distZip.outputs.files.first()) } - classifier = 'assets' + archiveClassifier = 'assets' } +build.dependsOn jar diff --git a/datahub-web-react/codegen.yml b/datahub-web-react/codegen.yml index 96a2bd61379205..417d6a8f1c2a6a 100644 --- a/datahub-web-react/codegen.yml +++ b/datahub-web-react/codegen.yml @@ -1,16 +1,6 @@ overwrite: true schema: - - '../datahub-graphql-core/src/main/resources/app.graphql' - - '../datahub-graphql-core/src/main/resources/entity.graphql' - - '../datahub-graphql-core/src/main/resources/search.graphql' - - '../datahub-graphql-core/src/main/resources/analytics.graphql' - - '../datahub-graphql-core/src/main/resources/recommendation.graphql' - - '../datahub-graphql-core/src/main/resources/auth.graphql' - - '../datahub-graphql-core/src/main/resources/ingestion.graphql' - - '../datahub-graphql-core/src/main/resources/timeline.graphql' - - '../datahub-graphql-core/src/main/resources/tests.graphql' - - '../datahub-graphql-core/src/main/resources/step.graphql' - - '../datahub-graphql-core/src/main/resources/lineage.graphql' + - '../datahub-graphql-core/src/main/resources/*.graphql' config: scalars: Long: number @@ -20,6 +10,9 @@ generates: src/types.generated.ts: plugins: - 'typescript' + src/possibleTypes.generated.ts: + plugins: + - 'fragment-matcher' src/: preset: near-operation-file presetConfig: 
diff --git a/datahub-web-react/craco.config.js b/datahub-web-react/craco.config.js deleted file mode 100644 index d3ed895cf840ea..00000000000000 --- a/datahub-web-react/craco.config.js +++ /dev/null @@ -1,53 +0,0 @@ -require('dotenv').config(); -const CracoAntDesignPlugin = require('craco-antd'); -const path = require('path'); -const CopyWebpackPlugin = require('copy-webpack-plugin'); - -const themeConfig = require(`./src/conf/theme/${process.env.REACT_APP_THEME_CONFIG}`); - -function addLessPrefixToKeys(styles) { - const output = {}; - Object.keys(styles).forEach((key) => { - output[`@${key}`] = styles[key]; - }); - return output; -} - -module.exports = { - webpack: { - plugins: { - add: [ - // Self host images by copying them to the build directory - new CopyWebpackPlugin({ - patterns: [{ from: 'src/images', to: 'platforms' }], - }), - // Copy monaco-editor files to the build directory - new CopyWebpackPlugin({ - patterns: [ - { from: "node_modules/monaco-editor/min/vs/", to: "monaco-editor/vs" }, - { from: "node_modules/monaco-editor/min-maps/vs/", to: "monaco-editor/min-maps/vs" }, - ], - }), - ], - }, - }, - plugins: [ - { - plugin: CracoAntDesignPlugin, - options: { - customizeThemeLessPath: path.join(__dirname, 'src/conf/theme/global-variables.less'), - customizeTheme: addLessPrefixToKeys(themeConfig.styles), - }, - }, - ], - jest: { - configure: (jestConfig) => { - jestConfig.transformIgnorePatterns = [ - // Ensures that lib0 and y-protocol libraries are transformed through babel as well - 'node_modules/(?!(lib0|y-protocols)).+\\.(js|jsx|mjs|cjs|ts|tsx)$', - '^.+\\.module\\.(css|sass|scss)$', - ]; - return jestConfig; - }, - }, -}; diff --git a/datahub-web-react/datahub-frontend.graphql b/datahub-web-react/datahub-frontend.graphql deleted file mode 100644 index 6df3c387e14fe7..00000000000000 --- a/datahub-web-react/datahub-frontend.graphql +++ /dev/null @@ -1,389 +0,0 @@ -scalar Long - -schema { - query: Query - mutation: Mutation -} - -type Query { - 
dataset(urn: String!): Dataset - user(urn: String!): CorpUser - search(input: SearchInput!): SearchResults - autoComplete(input: AutoCompleteInput!): AutoCompleteResults - browse(input: BrowseInput!): BrowseResults - browsePaths(input: BrowsePathsInput!): [[String!]!] -} - -type Mutation { - logIn(username: String!, password: String!): CorpUser - updateDataset(input: DatasetUpdateInput!): Dataset -} - -input DatasetUpdateInput { - urn: String! - ownership: OwnershipUpdate -} - -input OwnershipUpdate { - owners: [OwnerUpdate!] -} - -input OwnerUpdate { - # The owner URN, eg urn:li:corpuser:1 - owner: String! - - # The owner role type - type: OwnershipType! -} - -enum OwnershipSourceType { - AUDIT - DATABASE - FILE_SYSTEM - ISSUE_TRACKING_SYSTEM - MANUAL - SERVICE - SOURCE_CONTROL - OTHER -} - -type OwnershipSource { - """ - The type of the source - """ - type: OwnershipSourceType! - - """ - A reference URL for the source - """ - url: String -} - -enum OwnershipType { - """ - A person or group that is in charge of developing the code - """ - DEVELOPER - - """ - A person or group that is owning the data - """ - DATAOWNER - - """ - A person or a group that overseas the operation, e.g. a DBA or SRE. - """ - DELEGATE - - """ - A person, group, or service that produces/generates the data - """ - PRODUCER - - """ - A person, group, or service that consumes the data - """ - CONSUMER - - """ - A person or a group that has direct business interest - """ - STAKEHOLDER -} - -type Owner { - """ - Owner object - """ - owner: CorpUser! - - """ - The type of the ownership - """ - type: OwnershipType - - """ - Source information for the ownership - """ - source: OwnershipSource -} - -type Ownership { - owners: [Owner!] - - lastModified: Long! 
-} - -enum FabricType { - """ - Designates development fabrics - """ - DEV - - """ - Designates early-integration (staging) fabrics - """ - EI - - """ - Designates production fabrics - """ - PROD - - """ - Designates corporation fabrics - """ - CORP -} - -enum PlatformNativeType { - """ - Table - """ - TABLE - - """ - View - """ - VIEW - - """ - Directory in file system - """ - DIRECTORY - - """ - Stream - """ - STREAM - - """ - Bucket in key value store - """ - BUCKET -} - -type PropertyTuple { - key: String! - value: String -} - -type SubTypes { - typeNames: [String!] -} - -type Dataset { - urn: String! - - platform: String! - - name: String! - - origin: FabricType! - - description: String - - uri: String - - platformNativeType: PlatformNativeType - - tags: [String!]! - - properties: [PropertyTuple!] - - createdTime: Long! - - modifiedTime: Long! - - ownership: Ownership - - subTypes: SubTypes -} - -type CorpUserInfo { - active: Boolean! - - displayName: String - - email: String! - - title: String - - manager: CorpUser - - departmentId: Long - - departmentName: String - - firstName: String - - lastName: String - - fullName: String - - countryCode: String -} - -type CorpUserEditableInfo { - aboutMe: String - - teams: [String!] - - skills: [String!] - - pictureLink: String -} - -type CorpUser { - urn: String! - - username: String! - - info: CorpUserInfo - - editableInfo: CorpUserEditableInfo -} - -type CorpGroup implements Entity { - """ - The unique user URN - """ - urn: String! - - """ - GMS Entity Type - """ - type: EntityType! - - """ - group name e.g. wherehows-dev, ask_metadata - """ - name: String - - """ - Information of the corp group - """ - info: CorpGroupInfo -} - - -type CorpGroupInfo { - """ - email of this group - """ - email: String! - - """ - owners of this group - """ - admins: [String!]! - - """ - List of ldap urn in this group. - """ - members: [String!]! - - """ - List of groups in this group. - """ - groups: [String!]! 
-} - -enum EntityType { - DATASET - USER - DATA_FLOW - DATA_JOB - CORP_USER - CORP_GROUP -} - -# Search Input -input SearchInput { - type: EntityType! - query: String! - start: Int - count: Int - filters: [FacetFilterInput!] -} - -input FacetFilterInput { - field: String! # Facet Field Name - value: String! # Facet Value -} - -# Search Output -type SearchResults { - start: Int! - count: Int! - total: Int! - elements: [SearchResult!]! - facets: [FacetMetadata!] -} - -union SearchResult = Dataset | CorpUser - -type FacetMetadata { - field: String! - aggregations: [AggregationMetadata!]! -} - -type AggregationMetadata { - value: String! - count: Long! -} - -# Autocomplete Input -input AutoCompleteInput { - type: EntityType! - query: String! - field: String # Field name - limit: Int - filters: [FacetFilterInput!] -} - -# Autocomplete Output -type AutoCompleteResults { - query: String! - suggestions: [String!]! -} - -# Browse Inputs -input BrowseInput { - type: EntityType! - path: [String!] - start: Int - count: Int - filters: [FacetFilterInput!] -} - -# Browse Output -type BrowseResults { - entities: [BrowseResultEntity!]! - start: Int! - count: Int! - total: Int! - metadata: BrowseResultMetadata! -} - -type BrowseResultEntity { - name: String! - urn: String! -} - -type BrowseResultMetadata { - path: [String!] - groups: [BrowseResultGroup!]! - totalNumEntities: Long! -} - -type BrowseResultGroup { - name: String! - count: Long! -} - -# Browse Paths Input -input BrowsePathsInput { - type: EntityType! - urn: String! -} diff --git a/datahub-web-react/index.html b/datahub-web-react/index.html new file mode 100644 index 00000000000000..bb86e2f350e1a3 --- /dev/null +++ b/datahub-web-react/index.html @@ -0,0 +1,31 @@ + + + + + + + + + + + + DataHub + + + +

+ + + diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index 2d9329919fdc1b..f641706c7661e8 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -4,21 +4,20 @@ "private": true, "dependencies": { "@analytics/amplitude": "0.0.3", - "@analytics/google-analytics": "^0.5.2", + "@analytics/google-analytics": "^1.0.7", "@analytics/mixpanel": "^0.3.1", "@ant-design/colors": "^5.0.0", "@ant-design/icons": "^4.3.0", "@apollo/client": "^3.3.19", - "@craco/craco": "^6.1.1", - "@data-ui/xy-chart": "^0.0.84", - "@miragejs/graphql": "^0.1.11", + "@graphql-codegen/fragment-matcher": "^5.0.0", "@monaco-editor/react": "^4.3.1", "@react-hook/window-size": "^3.0.7", + "@react-spring/web": "^9.7.3", "@remirror/pm": "^2.0.3", "@remirror/react": "^2.0.24", "@remirror/styles": "^2.0.3", - "@testing-library/jest-dom": "^5.11.6", - "@testing-library/react": "^11.2.2", + "@testing-library/jest-dom": "^6.1.4", + "@testing-library/react": "^12.0.0", "@tommoor/remove-markdown": "^0.3.2", "@types/diff": "^5.0.0", "@types/dompurify": "^2.3.3", @@ -29,34 +28,31 @@ "@types/react-router": "^5.1.8", "@types/react-router-dom": "^5.1.6", "@uiw/react-md-editor": "^3.3.4", - "@vx/axis": "^0.0.199", - "@vx/glyph": "^0.0.199", - "@vx/group": "^0.0.199", - "@vx/hierarchy": "^0.0.199", - "@vx/legend": "^0.0.199", - "@vx/scale": "^0.0.199", - "@vx/shape": "^0.0.199", - "@vx/tooltip": "^0.0.199", - "@vx/zoom": "^0.0.199", + "@visx/axis": "^3.1.0", + "@visx/curve": "^3.0.0", + "@visx/group": "^3.0.0", + "@visx/hierarchy": "^3.0.0", + "@visx/legend": "^3.2.0", + "@visx/scale": "^3.2.0", + "@visx/shape": "^3.2.0", + "@visx/xychart": "^3.2.0", + "@visx/zoom": "^3.1.1", "analytics": "^0.8.9", "antd": "4.24.7", "color-hash": "^2.0.1", - "craco-antd": "^1.19.0", "cronstrue": "^1.122.0", - "d3-scale": "^3.3.0", + "d3-scale": "^4.0.2", "dayjs": "^1.11.7", "deepmerge": "^4.2.2", "diff": "^5.0.0", "dompurify": "^2.3.8", "dotenv": "^8.2.0", "faker": "5.5.3", - 
"find-webpack": "2.2.1", "graphql": "^15.5.0", "graphql-tag": "2.10.3", "graphql.macro": "^1.4.2", "history": "^5.0.0", "js-cookie": "^2.2.1", - "miragejs": "^0.1.41", "moment": "^2.29.4", "moment-timezone": "^0.5.35", "monaco-editor": "^0.28.1", @@ -66,48 +62,38 @@ "react": "^17.0.0", "react-color": "^2.19.3", "react-dom": "^17.0.0", - "react-email-share-link": "^1.0.3", "react-helmet-async": "^1.3.0", "react-highlighter": "^0.4.3", "react-icons": "4.3.1", + "react-intersection-observer": "^9.5.3", "react-js-cron": "^2.1.0", - "react-router": "^5.2.0", - "react-router-dom": "^5.1.6", - "react-scripts": "4.0.3", + "react-router": "^5.3", + "react-router-dom": "^5.3", "react-syntax-highlighter": "^15.4.4", "react-visibility-sensor": "^5.1.1", - "reactour": "1.18.7", + "reactour": "^1.19.3", "remirror": "^2.0.23", - "sinon": "^11.1.1", - "start-server-and-test": "1.12.2", "styled-components": "^5.2.1", "turndown-plugin-gfm": "^1.0.2", - "typescript": "^4.1.3", + "typescript": "^4.8.4", "uuid": "^8.3.2", "virtualizedtableforantd4": "^1.2.1", "web-vitals": "^0.2.4", "yamljs": "^0.3.0" }, "scripts": { - "analyze": "source-map-explorer 'dist/static/js/*.js'", - "start": "yarn run generate && BROWSER=none REACT_APP_MOCK=false craco start", - "start:mock": "yarn run generate && BROWSER=none REACT_APP_MOCK=true craco start", - "start:e2e": "REACT_APP_MOCK=cy BROWSER=none PORT=3010 craco start", - "ec2-dev": "yarn run generate && CI=true;export CI;BROWSER=none craco start", - "build": "yarn run generate && CI=false REACT_APP_MOCK=false craco build && rm -rf dist/ && cp -r build/yarn/ dist/ && rm -r build/yarn/", - "test": "craco test", - "pretest:e2e:ci": "yarn generate", - "test:e2e": "start-server-and-test start:e2e 3010", - "eject": "react-scripts eject", + "analyze": "source-map-explorer 'dist/assets/*.js'", + "start": "yarn run generate && vite", + "ec2-dev": "yarn run generate && CI=true;export CI;vite", + "build": "yarn run generate && 
NODE_OPTIONS='--max-old-space-size=3072 --openssl-legacy-provider' CI=false vite build", + "test": "vitest", "generate": "graphql-codegen --config codegen.yml", - "lint": "eslint . --ext .ts,.tsx --quiet", - "lint-fix": "eslint '*/**/*.{ts,tsx}' --quiet --fix" - }, - "eslintConfig": { - "extends": [ - "react-app", - "react-app/jest" - ] + "lint": "eslint . --ext .ts,.tsx --quiet && yarn format-check && yarn type-check", + "lint-fix": "eslint '*/**/*.{ts,tsx}' --quiet --fix && yarn format", + "format-check": "prettier --check src", + "format": "prettier --write src", + "type-check": "tsc --noEmit", + "type-watch": "tsc -w --noEmit" }, "browserslist": { "production": [ @@ -122,28 +108,48 @@ ] }, "devDependencies": { - "@graphql-codegen/cli": "1.20.0", + "@graphql-codegen/cli": "^5.0.0", "@graphql-codegen/near-operation-file-preset": "^1.17.13", "@graphql-codegen/typescript-operations": "1.17.13", "@graphql-codegen/typescript-react-apollo": "2.2.1", "@types/graphql": "^14.5.0", "@types/query-string": "^6.3.0", "@types/styled-components": "^5.1.7", - "@typescript-eslint/eslint-plugin": "^4.25.0", - "@typescript-eslint/parser": "^4.25.0", - "babel-loader": "8.2.2", - "copy-webpack-plugin": "6.4.1", - "eslint": "^7.27.0", - "eslint-config-airbnb-typescript": "^12.3.1", - "eslint-config-prettier": "^8.3.0", - "eslint-plugin-prettier": "^3.4.0", - "eslint-plugin-react": "^7.23.2", - "http-proxy-middleware": "2.0.0", - "prettier": "^2.3.0", - "source-map-explorer": "^2.5.2" + "@typescript-eslint/eslint-plugin": "^5.38.1", + "@typescript-eslint/parser": "^5.38.1", + "@vitejs/plugin-react": "^4.1.1", + "eslint": "^8.2.0", + "eslint-config-airbnb": "19.0.4", + "eslint-config-airbnb-typescript": "^17.0.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.25.3", + "eslint-plugin-jsx-a11y": "^6.5.1", + "eslint-plugin-react": "^7.28.0", + "eslint-plugin-react-hooks": "^4.3.0", + "eslint-plugin-react-refresh": "^0.4.6", + "eslint-plugin-vitest": "^0.3.17", + 
"jsdom": "^22.1.0", + "less": "^4.2.0", + "prettier": "^2.8.8", + "source-map-explorer": "^2.5.2", + "vite": "^4.5.3", + "vite-plugin-babel-macros": "^1.0.6", + "vite-plugin-static-copy": "^0.17.0", + "vite-plugin-svgr": "^4.1.0", + "vitest": "^0.34.6" }, "resolutions": { "@ant-design/colors": "6.0.0", - "refractor": "3.3.1" + "refractor": "3.3.1", + "json-schema": "0.4.0", + "@babel/traverse": ">=7.23.2", + "ansi-regex": "3.0.1", + "minimatch": "3.0.5", + "prismjs": "^1.27.0", + "nth-check": "^2.0.1", + "prosemirror-model": "1.8.2", + "prosemirror-state": "1.3.2", + "prosemirror-transform": "1.2.2", + "prosemirror-view": "1.13.4" } } diff --git a/datahub-web-react/public/favicon.ico b/datahub-web-react/public/assets/icons/favicon.ico similarity index 100% rename from datahub-web-react/public/favicon.ico rename to datahub-web-react/public/assets/icons/favicon.ico diff --git a/datahub-web-react/public/assets/logo.png b/datahub-web-react/public/assets/logo.png new file mode 120000 index 00000000000000..c570fd37bed974 --- /dev/null +++ b/datahub-web-react/public/assets/logo.png @@ -0,0 +1 @@ +logos/datahub-logo.png \ No newline at end of file diff --git a/datahub-web-react/public/assets/logos/datahub-logo.png b/datahub-web-react/public/assets/logos/datahub-logo.png new file mode 100644 index 00000000000000..028559661f06c5 Binary files /dev/null and b/datahub-web-react/public/assets/logos/datahub-logo.png differ diff --git a/datahub-web-react/public/browserconfig.xml b/datahub-web-react/public/browserconfig.xml new file mode 100644 index 00000000000000..0f5fd50ca7ce4a --- /dev/null +++ b/datahub-web-react/public/browserconfig.xml @@ -0,0 +1,9 @@ + + + + + + #020d10 + + + diff --git a/datahub-web-react/public/index.html b/datahub-web-react/public/index.html deleted file mode 100644 index ead3a0aba82cb9..00000000000000 --- a/datahub-web-react/public/index.html +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - DataHub - - - -
- - diff --git a/datahub-web-react/public/manifest.json b/datahub-web-react/public/manifest.json index 1ef8d60978d6cc..1ff1cb2a1f2693 100644 --- a/datahub-web-react/public/manifest.json +++ b/datahub-web-react/public/manifest.json @@ -3,7 +3,7 @@ "name": "DataHub", "icons": [ { - "src": "favicon.ico", + "src": "/assets/icons/favicon.ico", "sizes": "64x64 32x32 24x24 16x16", "type": "image/x-icon" } diff --git a/datahub-web-react/src/App.less b/datahub-web-react/src/App.less index a001aa103b33f6..5837d77a5a4e5c 100644 --- a/datahub-web-react/src/App.less +++ b/datahub-web-react/src/App.less @@ -1,8 +1,11 @@ -@import '~antd/dist/antd.less'; +@import 'antd/dist/antd.less'; +@import './conf/theme/global-variables.less'; @import './conf/theme/global-overrides.less'; +// Note that theme variables are also applied via an override in the +// less preprocessor configuration. @font-face { - font-family: 'Manrope'; - font-style: normal; - src: local('Mnarope'), url('./fonts/manrope.woff2') format('woff2'), + font-family: 'Manrope'; + font-style: normal; + src: local('Manrope'), url('./fonts/manrope.woff2') format('woff2'); } diff --git a/datahub-web-react/src/App.test.tsx b/datahub-web-react/src/App.test.tsx index 3b474e5ad84d19..8a391a37690098 100644 --- a/datahub-web-react/src/App.test.tsx +++ b/datahub-web-react/src/App.test.tsx @@ -1,11 +1,14 @@ import React from 'react'; -import { act } from 'react-dom/test-utils'; import { render } from '@testing-library/react'; -import App from './App'; +import { MockedProvider } from '@apollo/client/testing'; +import { InnerApp } from './App'; +import { mocks } from './Mocks'; -// eslint-disable-next-line jest/expect-expect +// eslint-disable-next-line vitest/expect-expect test('renders the app', async () => { - const promise = Promise.resolve(); - render(); - await act(() => promise); + render( + + + , + ); }); diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index b6bc608dccbbb0..2fdd7c8ed68004 100644 --- 
a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -1,41 +1,18 @@ -import React, { useEffect, useMemo, useState } from 'react'; +import React from 'react'; import Cookies from 'js-cookie'; -import { message } from 'antd'; import { BrowserRouter as Router } from 'react-router-dom'; import { ApolloClient, ApolloProvider, createHttpLink, InMemoryCache, ServerError } from '@apollo/client'; import { onError } from '@apollo/client/link/error'; -import { ThemeProvider } from 'styled-components'; import { Helmet, HelmetProvider } from 'react-helmet-async'; import './App.less'; import { Routes } from './app/Routes'; -import EntityRegistry from './app/entity/EntityRegistry'; -import { DashboardEntity } from './app/entity/dashboard/DashboardEntity'; -import { ChartEntity } from './app/entity/chart/ChartEntity'; -import { UserEntity } from './app/entity/user/User'; -import { GroupEntity } from './app/entity/group/Group'; -import { DatasetEntity } from './app/entity/dataset/DatasetEntity'; -import { DataFlowEntity } from './app/entity/dataFlow/DataFlowEntity'; -import { DataJobEntity } from './app/entity/dataJob/DataJobEntity'; -import { TagEntity } from './app/entity/tag/Tag'; -import { EntityRegistryContext } from './entityRegistryContext'; -import { Theme } from './conf/theme/types'; -import defaultThemeConfig from './conf/theme/theme_light.config.json'; import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; -import { GlossaryTermEntity } from './app/entity/glossaryTerm/GlossaryTermEntity'; -import { MLFeatureEntity } from './app/entity/mlFeature/MLFeatureEntity'; -import { MLPrimaryKeyEntity } from './app/entity/mlPrimaryKey/MLPrimaryKeyEntity'; -import { MLFeatureTableEntity } from './app/entity/mlFeatureTable/MLFeatureTableEntity'; -import { MLModelEntity } from './app/entity/mlModel/MLModelEntity'; -import { MLModelGroupEntity } from 
'./app/entity/mlModelGroup/MLModelGroupEntity'; -import { DomainEntity } from './app/entity/domain/DomainEntity'; -import { ContainerEntity } from './app/entity/container/ContainerEntity'; -import GlossaryNodeEntity from './app/entity/glossaryNode/GlossaryNodeEntity'; -import { DataPlatformEntity } from './app/entity/dataPlatform/DataPlatformEntity'; -import { DataProductEntity } from './app/entity/dataProduct/DataProductEntity'; -import { DataPlatformInstanceEntity } from './app/entity/dataPlatformInstance/DataPlatformInstanceEntity'; -import { RoleEntity } from './app/entity/Access/RoleEntity'; +import possibleTypesResult from './possibleTypes.generated'; +import { ErrorCodes } from './app/shared/constants'; +import CustomThemeProvider from './CustomThemeProvider'; +import { useCustomTheme } from './customThemeContext'; /* Construct Apollo Client @@ -43,23 +20,24 @@ import { RoleEntity } from './app/entity/Access/RoleEntity'; const httpLink = createHttpLink({ uri: '/api/v2/graphql' }); const errorLink = onError((error) => { - const { networkError, graphQLErrors } = error; + const { networkError } = error; if (networkError) { const serverError = networkError as ServerError; - if (serverError.statusCode === 401) { + if (serverError.statusCode === ErrorCodes.Unauthorized) { isLoggedInVar(false); Cookies.remove(GlobalCfg.CLIENT_AUTH_COOKIE); const currentPath = window.location.pathname + window.location.search; window.location.replace(`${PageRoutes.AUTHENTICATE}?redirect_uri=${encodeURIComponent(currentPath)}`); } } - if (graphQLErrors && graphQLErrors.length) { - const firstError = graphQLErrors[0]; - const { extensions } = firstError; - const errorCode = extensions && (extensions.code as number); - // Fallback in case the calling component does not handle. - message.error(`${firstError.message} (code ${errorCode})`, 3); - } + // Disabled behavior for now -> Components are expected to handle their errors. 
+ // if (graphQLErrors && graphQLErrors.length) { + // const firstError = graphQLErrors[0]; + // const { extensions } = firstError; + // const errorCode = extensions && (extensions.code as number); + // // Fallback in case the calling component does not handle. + // message.error(`${firstError.message} (code ${errorCode})`, 3); // TODO: Decide if we want this back. + // } }); const client = new ApolloClient({ @@ -74,9 +52,16 @@ const client = new ApolloClient({ return { ...oldObj, ...newObj }; }, }, + entity: { + merge: (oldObj, newObj) => { + return { ...oldObj, ...newObj }; + }, + }, }, }, }, + // need to define possibleTypes to allow us to use Apollo cache with union types + possibleTypes: possibleTypesResult.possibleTypes, }), credentials: 'include', defaultOptions: { @@ -89,57 +74,25 @@ const client = new ApolloClient({ }, }); -const App: React.VFC = () => { - const [dynamicThemeConfig, setDynamicThemeConfig] = useState(defaultThemeConfig); - - useEffect(() => { - import(`./conf/theme/${process.env.REACT_APP_THEME_CONFIG}`).then((theme) => { - setDynamicThemeConfig(theme); - }); - }, []); - - const entityRegistry = useMemo(() => { - const register = new EntityRegistry(); - register.register(new DatasetEntity()); - register.register(new DashboardEntity()); - register.register(new ChartEntity()); - register.register(new UserEntity()); - register.register(new GroupEntity()); - register.register(new TagEntity()); - register.register(new DataFlowEntity()); - register.register(new DataJobEntity()); - register.register(new GlossaryTermEntity()); - register.register(new MLFeatureEntity()); - register.register(new MLPrimaryKeyEntity()); - register.register(new MLFeatureTableEntity()); - register.register(new MLModelEntity()); - register.register(new MLModelGroupEntity()); - register.register(new DomainEntity()); - register.register(new ContainerEntity()); - register.register(new GlossaryNodeEntity()); - register.register(new RoleEntity()); - register.register(new 
DataPlatformEntity()); - register.register(new DataProductEntity()); - register.register(new DataPlatformInstanceEntity()); - return register; - }, []); - +export const InnerApp: React.VFC = () => { return ( - + + + {useCustomTheme().theme?.content.title} + - - {dynamicThemeConfig.content.title} - - - - - - + - + ); }; -export default App; +export const App: React.VFC = () => { + return ( + + + + ); +}; diff --git a/datahub-web-react/src/AppConfigProvider.tsx b/datahub-web-react/src/AppConfigProvider.tsx index 2984116cf20281..928b8da5626d8f 100644 --- a/datahub-web-react/src/AppConfigProvider.tsx +++ b/datahub-web-react/src/AppConfigProvider.tsx @@ -39,7 +39,11 @@ const AppConfigProvider = ({ children }: { children: React.ReactNode }) => { return ( {children} diff --git a/datahub-web-react/src/CustomThemeProvider.tsx b/datahub-web-react/src/CustomThemeProvider.tsx new file mode 100644 index 00000000000000..505c37d4c828dd --- /dev/null +++ b/datahub-web-react/src/CustomThemeProvider.tsx @@ -0,0 +1,37 @@ +import React, { useEffect, useState } from 'react'; +import { ThemeProvider } from 'styled-components'; +import { Theme } from './conf/theme/types'; +import defaultThemeConfig from './conf/theme/theme_light.config.json'; +import { CustomThemeContext } from './customThemeContext'; + +interface Props { + children: React.ReactNode; + skipSetTheme?: boolean; +} + +const CustomThemeProvider = ({ children, skipSetTheme }: Props) => { + const [currentTheme, setTheme] = useState(defaultThemeConfig); + + useEffect(() => { + if (import.meta.env.DEV) { + import(/* @vite-ignore */ `./conf/theme/${import.meta.env.REACT_APP_THEME_CONFIG}`).then((theme) => { + setTheme(theme); + }); + } else if (!skipSetTheme) { + // Send a request to the server to get the theme config. 
+ fetch(`/assets/conf/theme/${import.meta.env.REACT_APP_THEME_CONFIG}`) + .then((response) => response.json()) + .then((theme) => { + setTheme(theme); + }); + } + }, [skipSetTheme]); + + return ( + + {children} + + ); +}; + +export default CustomThemeProvider; diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index a2e14308e8cee2..aed672a34e7caf 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -29,6 +29,8 @@ import { PlatformPrivileges, FilterOperator, AppConfig, + EntityPrivileges, + BusinessAttribute, } from './types.generated'; import { GetTagDocument } from './graphql/tag.generated'; import { GetMlModelDocument } from './graphql/mlModel.generated'; @@ -39,12 +41,27 @@ import { GetMeDocument } from './graphql/me.generated'; import { ListRecommendationsDocument } from './graphql/recommendations.generated'; import { FetchedEntity } from './app/lineage/types'; import { DEFAULT_APP_CONFIG } from './appConfigContext'; +import { GetQuickFiltersDocument } from './graphql/quickFilters.generated'; +import { GetGrantedPrivilegesDocument } from './graphql/policy.generated'; +import { VIEW_ENTITY_PAGE } from './app/entity/shared/constants'; + +export const entityPrivileges: EntityPrivileges = { + canEditLineage: true, + canManageEntity: true, + canManageChildren: true, + canEditEmbed: true, + canEditQueries: true, + canEditProperties: true, + __typename: 'EntityPrivileges', +}; export const user1 = { + __typename: 'CorpUser', username: 'sdas', urn: 'urn:li:corpuser:1', type: EntityType.CorpUser, info: { + __typename: 'CorpUserInfo', email: 'sdas@domain.com', active: true, displayName: 'sdas', @@ -53,18 +70,19 @@ export const user1 = { lastName: 'Das', fullName: 'Shirshanka Das', }, - editableInfo: { - pictureLink: 'https://crunchconf.com/img/2019/speakers/1559291783-ShirshankaDas.png', - }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: 
EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -74,14 +92,24 @@ export const user1 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + properties: null, + editableProperties: null, + autoRenderAspects: [], }; const user2 = { + __typename: 'CorpUser', username: 'john', urn: 'urn:li:corpuser:3', type: EntityType.CorpUser, - info: { + properties: { + __typename: 'CorpUserInfo', email: 'john@domain.com', active: true, displayName: 'john', @@ -90,25 +118,41 @@ const user2 = { lastName: 'Joyce', fullName: 'John Joyce', }, - editableInfo: { - pictureLink: null, - }, editableProperties: { displayName: 'Test', title: 'test', pictureLink: null, teams: [], skills: [], + __typename: 'CorpUserEditableProperties', + email: 'john@domain.com', + }, + groups: { + __typename: 'EntityRelationshipsResult', + relationships: [ + { + __typename: 'EntityRelationship', + entity: { + __typename: 'CorpGroup', + urn: 'urn:li:corpgroup:group1', + name: 'group1', + properties: null, + }, + }, + ], }, globalTags: { + __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -118,7 +162,13 @@ const user2 = { }, ], }, - settings: { appearance: { showSimplifiedHomepage: false }, views: { defaultView: null } }, + settings: { + __typename: 'CorpUserSettings', + appearance: { __typename: 
'CorpUserAppearanceSettings', showSimplifiedHomepage: false }, + views: { __typename: 'CorpUserViewSettings', defaultView: null }, + }, + editableInfo: null, + info: null, }; export const dataPlatform = { @@ -149,6 +199,7 @@ export const dataPlatformInstance = { }; export const dataset1 = { + __typename: 'Dataset', urn: 'urn:li:dataset:1', type: EntityType.Dataset, platform: { @@ -172,9 +223,7 @@ export const dataset1 = { tags: ['Private', 'PII'], uri: 'www.google.com', privileges: { - canEditLineage: false, - canEditEmbed: false, - canEditQueries: false, + ...entityPrivileges, }, properties: { name: 'The Great Test Dataset', @@ -257,9 +306,14 @@ export const dataset1 = { statsSummary: null, embed: null, browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + autoRenderAspects: [], + structuredProperties: null, + forms: null, + activeIncidents: null, }; export const dataset2 = { + __typename: 'Dataset', urn: 'urn:li:dataset:2', type: EntityType.Dataset, platform: { @@ -274,9 +328,7 @@ export const dataset2 = { type: EntityType.DataPlatform, }, privileges: { - canEditLineage: false, - canEditEmbed: false, - canEditQueries: false, + ...entityPrivileges, }, lastIngested: null, exists: true, @@ -351,6 +403,10 @@ export const dataset2 = { statsSummary: null, embed: null, browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + autoRenderAspects: [], + structuredProperties: null, + forms: null, + activeIncidents: null, }; export const dataset3 = { @@ -358,20 +414,23 @@ export const dataset3 = { urn: 'urn:li:dataset:3', type: EntityType.Dataset, platform: { + __typename: 'DataPlatform', urn: 'urn:li:dataPlatform:kafka', name: 'Kafka', + displayName: 'Kafka', info: { + __typename: 'DataPlatformInfo', displayName: 'Kafka', type: PlatformType.MessageBroker, datasetNameDelimiter: '.', logoUrl: '', }, type: EntityType.DataPlatform, + lastIngested: null, + properties: null, }, privileges: { - canEditLineage: false, 
- canEditEmbed: false, - canEditQueries: false, + ...entityPrivileges, }, exists: true, lastIngested: null, @@ -381,54 +440,83 @@ export const dataset3 = { origin: 'PROD', uri: 'www.google.com', properties: { + __typename: 'DatasetProperties', name: 'Yet Another Dataset', + qualifiedName: 'Yet Another Dataset', description: 'This and here we have yet another Dataset (YAN). Are there more?', origin: 'PROD', - customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:3' }], + customProperties: [ + { + __typename: 'CustomPropertiesEntry', + key: 'propertyAKey', + value: 'propertyAValue', + associatedUrn: 'urn:li:dataset:3', + }, + ], externalUrl: 'https://data.hub', + lastModified: { + __typename: 'AuditStamp', + time: 0, + actor: null, + }, }, parentContainers: { + __typename: 'ParentContainersResult', count: 0, containers: [], }, editableProperties: null, created: { + __typename: 'AuditStamp', time: 0, + actor: null, }, lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, ownership: { + __typename: 'Ownership', owners: [ { + __typename: 'Owner', owner: { ...user1, }, type: 'DATAOWNER', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, { + __typename: 'Owner', owner: { ...user2, }, type: 'DELEGATE', associatedUrn: 'urn:li:dataset:3', + ownershipType: null, }, ], lastModified: { + __typename: 'AuditStamp', time: 0, + actor: null, }, }, globalTags: { __typename: 'GlobalTags', tags: [ { + __typename: 'TagAssociation', tag: { + __typename: 'Tag', type: EntityType.Tag, urn: 'urn:li:tag:abc-sample-tag', name: 'abc-sample-tag', description: 'sample tag', properties: { + __typename: 'TagProperties', name: 'abc-sample-tag', description: 'sample tag', colorHex: 'sample tag color', @@ -439,14 +527,18 @@ export const dataset3 = { ], }, glossaryTerms: { + __typename: 'GlossaryTerms', terms: [ { + __typename: 'GlossaryTermAssociation', term: { + __typename: 'GlossaryTerm', type: EntityType.GlossaryTerm, urn: 
'urn:li:glossaryTerm:sample-glossary-term', name: 'sample-glossary-term', hierarchicalName: 'example.sample-glossary-term', properties: { + __typename: 'GlossaryTermProperties', name: 'sample-glossary-term', description: 'sample definition', definition: 'sample definition', @@ -463,13 +555,21 @@ export const dataset3 = { incoming: null, outgoing: null, institutionalMemory: { + __typename: 'InstitutionalMemory', elements: [ { + __typename: 'InstitutionalMemoryMetadata', url: 'https://www.google.com', - author: { urn: 'urn:li:corpuser:datahub', username: 'datahub', type: EntityType.CorpUser }, + author: { + __typename: 'CorpUser', + urn: 'urn:li:corpuser:datahub', + username: 'datahub', + type: EntityType.CorpUser, + }, description: 'This only points to Google', label: 'This only points to Google', created: { + __typename: 'AuditStamp', actor: 'urn:li:corpuser:1', time: 1612396473001, }, @@ -482,12 +582,14 @@ export const dataset3 = { operations: null, datasetProfiles: [ { + __typename: 'DatasetProfile', rowCount: 10, columnCount: 5, sizeInBytes: 10000, timestampMillis: 0, fieldProfiles: [ { + __typename: 'DatasetFieldProfile', fieldPath: 'testColumn', uniqueCount: 1, uniqueProportion: 0.129, @@ -507,6 +609,7 @@ export const dataset3 = { viewProperties: null, autoRenderAspects: [ { + __typename: 'RawAspect', aspectName: 'autoRenderAspect', payload: '{ "values": [{ "autoField1": "autoValue1", "autoField2": "autoValue2" }] }', renderSpec: { @@ -523,13 +626,19 @@ export const dataset3 = { health: [], assertions: null, status: null, - readRuns: null, - writeRuns: null, + runs: null, testResults: null, siblings: null, statsSummary: null, embed: null, - browsePathV2: { path: [{ name: 'test', entity: null }], __typename: 'BrowsePathV2' }, + browsePathV2: { __typename: 'BrowsePathV2', path: [{ name: 'test', entity: null }] }, + access: null, + dataProduct: null, + lastProfile: null, + lastOperation: null, + structuredProperties: null, + forms: null, + activeIncidents: null, 
} as Dataset; export const dataset3WithSchema = { @@ -554,6 +663,7 @@ export const dataset3WithSchema = { globalTags: null, glossaryTerms: null, label: 'hi', + schemaFieldEntity: null, }, { __typename: 'SchemaField', @@ -569,6 +679,7 @@ export const dataset3WithSchema = { globalTags: null, glossaryTerms: null, label: 'hi', + schemaFieldEntity: null, }, ], hash: '', @@ -617,6 +728,7 @@ export const dataset5 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:5' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; @@ -631,6 +743,7 @@ export const dataset6 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:6' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; @@ -867,6 +980,7 @@ export const container1 = { externalUrl: null, __typename: 'ContainerProperties', }, + autoRenderAspects: [], __typename: 'Container', } as Container; @@ -881,6 +995,7 @@ export const container2 = { externalUrl: null, __typename: 'ContainerProperties', }, + autoRenderAspects: [], __typename: 'Container', } as Container; @@ -928,6 +1043,7 @@ export const glossaryTerm1 = { }, parentNodes: null, deprecation: null, + autoRenderAspects: [], } as GlossaryTerm; const glossaryTerm2 = { @@ -1000,6 +1116,7 @@ const glossaryTerm2 = { __typename: 'EntityRelationshipsResult', }, parentNodes: null, + autoRenderAspects: [], __typename: 'GlossaryTerm', }; @@ -1066,6 +1183,7 @@ const glossaryTerm3 = { __typename: 'GlossaryRelatedTerms', }, deprecation: null, + autoRenderAspects: [], __typename: 'GlossaryTerm', } as GlossaryTerm; @@ -1136,6 +1254,7 @@ export const glossaryNode5 = { export const sampleTag = { urn: 'urn:li:tag:abc-sample-tag', + type: EntityType.Tag, name: 'abc-sample-tag', description: 'sample tag description', ownership: { @@ -1162,6 +1281,7 @@ export const sampleTag = { 
description: 'sample tag description', colorHex: 'sample tag color', }, + autoRenderAspects: [], }; export const dataFlow1 = { @@ -1233,6 +1353,9 @@ export const dataFlow1 = { }, domain: null, deprecation: null, + autoRenderAspects: [], + activeIncidents: null, + health: [], } as DataFlow; export const dataJob1 = { @@ -1266,8 +1389,7 @@ export const dataJob1 = { }, }, privileges: { - canEditLineage: false, - canEditEmbed: false, + ...entityPrivileges, }, properties: { name: 'DataJobInfoName', @@ -1319,8 +1441,107 @@ export const dataJob1 = { domain: null, status: null, deprecation: null, + autoRenderAspects: [], + activeIncidents: null, + health: [], } as DataJob; +export const businessAttribute = { + urn: 'urn:li:businessAttribute:ba1', + type: EntityType.BusinessAttribute, + __typename: 'BusinessAttribute', + properties: { + name: 'TestBusinessAtt-2', + description: 'lorem upsum updated 12', + created: { + time: 1705857132786, + }, + lastModified: { + time: 1705857132786, + }, + glossaryTerms: { + terms: [ + { + term: { + urn: 'urn:li:glossaryTerm:1', + type: EntityType.GlossaryTerm, + hierarchicalName: 'SampleHierarchicalName', + name: 'SampleName', + }, + associatedUrn: 'urn:li:businessAttribute:ba1', + }, + ], + __typename: 'GlossaryTerms', + }, + tags: { + __typename: 'GlobalTags', + tags: [ + { + tag: { + urn: 'urn:li:tag:abc-sample-tag', + __typename: 'Tag', + type: EntityType.Tag, + name: 'abc-sample-tag', + }, + __typename: 'TagAssociation', + associatedUrn: 'urn:li:businessAttribute:ba1', + }, + { + tag: { + urn: 'urn:li:tag:TestTag', + __typename: 'Tag', + type: EntityType.Tag, + name: 'TestTag', + }, + __typename: 'TagAssociation', + associatedUrn: 'urn:li:businessAttribute:ba1', + }, + ], + }, + customProperties: [ + { + key: 'prop2', + value: 'val2', + associatedUrn: 'urn:li:businessAttribute:ba1', + __typename: 'CustomPropertiesEntry', + }, + { + key: 'prop1', + value: 'val1', + associatedUrn: 'urn:li:businessAttribute:ba1', + __typename: 
'CustomPropertiesEntry', + }, + { + key: 'prop3', + value: 'val3', + associatedUrn: 'urn:li:businessAttribute:ba1', + __typename: 'CustomPropertiesEntry', + }, + ], + }, + ownership: { + owners: [ + { + owner: { + ...user1, + }, + associatedUrn: 'urn:li:businessAttribute:ba', + type: 'DATAOWNER', + }, + { + owner: { + ...user2, + }, + associatedUrn: 'urn:li:businessAttribute:ba', + type: 'DELEGATE', + }, + ], + lastModified: { + time: 0, + }, + }, +} as BusinessAttribute; + export const dataJob2 = { __typename: 'DataJob', urn: 'urn:li:dataJob:2', @@ -1328,8 +1549,7 @@ export const dataJob2 = { dataFlow: dataFlow1, jobId: 'jobId2', privileges: { - canEditLineage: false, - canEditEmbed: false, + ...entityPrivileges, }, ownership: { __typename: 'Ownership', @@ -1388,6 +1608,9 @@ export const dataJob2 = { upstream: null, downstream: null, deprecation: null, + autoRenderAspects: [], + activeIncidents: null, + health: [], } as DataJob; export const dataJob3 = { @@ -1399,8 +1622,7 @@ export const dataJob3 = { lastIngested: null, exists: true, privileges: { - canEditLineage: false, - canEditEmbed: false, + ...entityPrivileges, }, ownership: { __typename: 'Ownership', @@ -1460,6 +1682,9 @@ export const dataJob3 = { downstream: null, status: null, deprecation: null, + autoRenderAspects: [], + activeIncidents: null, + health: [], } as DataJob; export const mlModel = { @@ -1541,6 +1766,7 @@ export const mlModel = { downstream: null, status: null, deprecation: null, + autoRenderAspects: [], } as MlModel; export const dataset1FetchedEntity = { @@ -1839,7 +2065,6 @@ export const mocks = [ browse: { entities: [ { - __typename: 'Dataset', ...dataset1, }, ], @@ -1986,7 +2211,6 @@ export const mocks = [ searchResults: [ { entity: { - __typename: 'Dataset', ...dataset1, }, matchedFields: [ @@ -1999,7 +2223,6 @@ export const mocks = [ }, { entity: { - __typename: 'Dataset', ...dataset2, }, }, @@ -2075,6 +2298,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 
'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2248,6 +2472,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -2259,10 +2484,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -2270,6 +2497,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2278,12 +2506,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -2829,6 +3058,7 @@ export const mocks = [ // ], // }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -2908,6 +3138,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3205,6 +3436,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3216,10 +3448,12 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ { + __typename: 'AggregationMetadata', value: 'PROD', count: 3, entity: null, @@ -3227,6 +3461,7 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3235,12 +3470,13 @@ export const 
mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3290,6 +3526,7 @@ export const mocks = [ total: 1, searchResults: [ { + __typename: 'SearchResult', entity: { __typename: 'Dataset', ...dataset3, @@ -3301,6 +3538,7 @@ export const mocks = [ suggestions: [], facets: [ { + __typename: 'FacetMetadata', field: 'origin', displayName: 'origin', aggregations: [ @@ -3308,10 +3546,12 @@ export const mocks = [ value: 'PROD', count: 3, entity: null, + __typename: 'AggregationMetadata', }, ], }, { + __typename: 'FacetMetadata', field: '_entityType', displayName: 'Type', aggregations: [ @@ -3320,12 +3560,13 @@ export const mocks = [ ], }, { + __typename: 'FacetMetadata', field: 'platform', displayName: 'platform', aggregations: [ - { value: 'hdfs', count: 1, entity: null }, - { value: 'mysql', count: 1, entity: null }, - { value: 'kafka', count: 1, entity: null }, + { value: 'hdfs', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'mysql', count: 1, entity: null, __typename: 'AggregationMetadata' }, + { value: 'kafka', count: 1, entity: null, __typename: 'AggregationMetadata' }, ], }, ], @@ -3367,6 +3608,7 @@ export const mocks = [ __typename: 'AuthenticatedUser', corpUser: { ...user2 }, platformPrivileges: { + __typename: 'PlatformPrivileges', viewAnalytics: true, managePolicies: true, manageIdentities: true, @@ -3376,6 +3618,7 @@ export const mocks = [ createTags: true, manageUserCredentials: true, manageGlossaries: true, + viewTests: false, manageTests: true, manageTokens: true, manageSecrets: true, @@ -3384,6 
+3627,8 @@ export const mocks = [ manageGlobalViews: true, manageOwnershipTypes: true, manageGlobalAnnouncements: true, + createBusinessAttributes: true, + manageBusinessAttributes: true, }, }, }, @@ -3593,6 +3838,31 @@ export const mocks = [ } as GetSearchResultsForMultipleQuery, }, }, + { + request: { + query: GetQuickFiltersDocument, + variables: { + input: {}, + }, + }, + result: { + data: [], + }, + }, + { + request: { + query: GetGrantedPrivilegesDocument, + variables: { + input: { + actorUrn: 'urn:li:corpuser:3', + resourceSpec: { resourceType: EntityType.Dataset, resourceUrn: dataset3.urn }, + }, + }, + }, + result: { + data: { getGrantedPrivileges: { privileges: [VIEW_ENTITY_PAGE] } }, + }, + }, ]; export const mocksWithSearchFlagsOff = [ @@ -3624,6 +3894,7 @@ export const platformPrivileges: PlatformPrivileges = { manageIngestion: true, manageSecrets: true, manageTokens: true, + viewTests: false, manageTests: true, manageGlossaries: true, manageUserCredentials: true, @@ -3633,4 +3904,6 @@ export const platformPrivileges: PlatformPrivileges = { manageGlobalViews: true, manageOwnershipTypes: true, manageGlobalAnnouncements: true, + createBusinessAttributes: true, + manageBusinessAttributes: true, }; diff --git a/datahub-web-react/src/__mocks__/styleMock.js b/datahub-web-react/src/__mocks__/styleMock.js new file mode 100644 index 00000000000000..f053ebf7976e37 --- /dev/null +++ b/datahub-web-react/src/__mocks__/styleMock.js @@ -0,0 +1 @@ +module.exports = {}; diff --git a/datahub-web-react/src/app/AdminConsole.tsx b/datahub-web-react/src/app/AdminConsole.tsx index 8b14ca35763d10..f6395a3bd3cb8a 100644 --- a/datahub-web-react/src/app/AdminConsole.tsx +++ b/datahub-web-react/src/app/AdminConsole.tsx @@ -4,9 +4,9 @@ import { Menu } from 'antd'; import styled from 'styled-components'; import { BankOutlined, BarChartOutlined, MenuOutlined } from '@ant-design/icons'; import Sider from 'antd/lib/layout/Sider'; -import { useGetAuthenticatedUser } from 
'./useGetAuthenticatedUser'; import { useAppConfig } from './useAppConfig'; import { ANTD_GRAY } from './entity/shared/constants'; +import { useUserContext } from './context/useUserContext'; const ToggleContainer = styled.div` background-color: ${ANTD_GRAY[4]}; @@ -32,7 +32,7 @@ const ControlSlideOut = styled(Sider)` * Container for all views behind an authentication wall. */ export const AdminConsole = (): JSX.Element => { - const me = useGetAuthenticatedUser(); + const me = useUserContext(); const [adminConsoleOpen, setAdminConsoleOpen] = useState(false); const { config } = useAppConfig(); @@ -40,8 +40,8 @@ export const AdminConsole = (): JSX.Element => { const isAnalyticsEnabled = config?.analyticsConfig.enabled; const isPoliciesEnabled = config?.policiesConfig.enabled; - const showAnalytics = (isAnalyticsEnabled && me && me.platformPrivileges.viewAnalytics) || false; - const showPolicyBuilder = (isPoliciesEnabled && me && me.platformPrivileges.managePolicies) || false; + const showAnalytics = (isAnalyticsEnabled && me && me?.platformPrivileges?.viewAnalytics) || false; + const showPolicyBuilder = (isPoliciesEnabled && me && me?.platformPrivileges?.managePolicies) || false; const showAdminConsole = showAnalytics || showPolicyBuilder; const onMenuItemClick = () => { diff --git a/datahub-web-react/src/app/AppProviders.tsx b/datahub-web-react/src/app/AppProviders.tsx index 1ced44048b5023..00597e1cf76406 100644 --- a/datahub-web-react/src/app/AppProviders.tsx +++ b/datahub-web-react/src/app/AppProviders.tsx @@ -4,6 +4,8 @@ import { EducationStepsProvider } from '../providers/EducationStepsProvider'; import UserContextProvider from './context/UserContextProvider'; import QuickFiltersProvider from '../providers/QuickFiltersProvider'; import SearchContextProvider from './search/context/SearchContextProvider'; +import EntityRegistryProvider from './EntityRegistryProvider'; +import { BrowserTitleProvider } from './shared/BrowserTabTitleContext'; interface Props { 
children: React.ReactNode; @@ -13,11 +15,15 @@ export default function AppProviders({ children }: Props) { return ( - - - {children} - - + + + + + {children} + + + + ); diff --git a/datahub-web-react/src/app/EmbedRoutes.tsx b/datahub-web-react/src/app/EmbedRoutes.tsx new file mode 100644 index 00000000000000..4ab38ed6b8acf7 --- /dev/null +++ b/datahub-web-react/src/app/EmbedRoutes.tsx @@ -0,0 +1,23 @@ +import React from 'react'; +import { Route } from 'react-router-dom'; +import { PageRoutes } from '../conf/Global'; +import EmbeddedPage from './embed/EmbeddedPage'; +import { useEntityRegistry } from './useEntityRegistry'; +import EmbedLookup from './embed/lookup'; + +export default function EmbedRoutes() { + const entityRegistry = useEntityRegistry(); + + return ( + <> + } /> + {entityRegistry.getEntities().map((entity) => ( + } + /> + ))} + + ); +} diff --git a/datahub-web-react/src/app/EntityRegistryProvider.tsx b/datahub-web-react/src/app/EntityRegistryProvider.tsx new file mode 100644 index 00000000000000..9e283c0d07fc8a --- /dev/null +++ b/datahub-web-react/src/app/EntityRegistryProvider.tsx @@ -0,0 +1,10 @@ +import React from 'react'; +import { EntityRegistryContext } from '../entityRegistryContext'; +import useBuildEntityRegistry from './useBuildEntityRegistry'; + +const EntityRegistryProvider = ({ children }: { children: React.ReactNode }) => { + const entityRegistry = useBuildEntityRegistry(); + return {children}; +}; + +export default EntityRegistryProvider; diff --git a/datahub-web-react/src/app/ProtectedRoutes.tsx b/datahub-web-react/src/app/ProtectedRoutes.tsx index 469e0d6030b352..d975e6d4d99c2d 100644 --- a/datahub-web-react/src/app/ProtectedRoutes.tsx +++ b/datahub-web-react/src/app/ProtectedRoutes.tsx @@ -1,38 +1,33 @@ -import React from 'react'; -import { Switch, Route } from 'react-router-dom'; +import React, { useEffect } from 'react'; +import { Switch, Route, useLocation, useHistory } from 'react-router-dom'; import { Layout } from 'antd'; 
import { HomePage } from './home/HomePage'; import { SearchRoutes } from './SearchRoutes'; -import { PageRoutes } from '../conf/Global'; -import EmbeddedPage from './embed/EmbeddedPage'; -import { useEntityRegistry } from './useEntityRegistry'; -import AppProviders from './AppProviders'; -import EmbedLookup from './embed/lookup'; +import EmbedRoutes from './EmbedRoutes'; +import { NEW_ROUTE_MAP, PageRoutes } from '../conf/Global'; +import { getRedirectUrl } from '../conf/utils'; /** * Container for all views behind an authentication wall. */ export const ProtectedRoutes = (): JSX.Element => { - const entityRegistry = useEntityRegistry(); + const location = useLocation(); + const history = useHistory(); + + useEffect(() => { + if (location.pathname.indexOf('/Validation') !== -1) { + history.replace(getRedirectUrl(NEW_ROUTE_MAP)); + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location]); return ( - - - - - } /> - } /> - {entityRegistry.getEntities().map((entity) => ( - } - /> - ))} - } /> - - - - + + + } /> + } /> + } /> + + ); }; diff --git a/datahub-web-react/src/app/Routes.tsx b/datahub-web-react/src/app/Routes.tsx index 057d9238fb9196..2b53fa32da1066 100644 --- a/datahub-web-react/src/app/Routes.tsx +++ b/datahub-web-react/src/app/Routes.tsx @@ -1,6 +1,7 @@ import React from 'react'; -import { Switch, Route, RouteProps, Redirect } from 'react-router-dom'; +import { Switch, Route, RouteProps } from 'react-router-dom'; import { useReactiveVar } from '@apollo/client'; +import AppProviders from './AppProviders'; import { LogIn } from './auth/LogIn'; import { SignUp } from './auth/SignUp'; import { ResetCredentials } from './auth/ResetCredentials'; @@ -36,9 +37,14 @@ export const Routes = (): JSX.Element => { - } /> - {/* Starting the react app locally opens /assets by default. 
For a smoother dev experience, we'll redirect to the homepage */} - } exact /> + ( + + + + )} + /> ); diff --git a/datahub-web-react/src/app/SearchRoutes.tsx b/datahub-web-react/src/app/SearchRoutes.tsx index d2ad4ab6f4db19..3343260c72bcf6 100644 --- a/datahub-web-react/src/app/SearchRoutes.tsx +++ b/datahub-web-react/src/app/SearchRoutes.tsx @@ -12,9 +12,9 @@ import { ManageIngestionPage } from './ingest/ManageIngestionPage'; import GlossaryRoutes from './glossary/GlossaryRoutes'; import { SettingsPage } from './settings/SettingsPage'; import DomainRoutes from './domain/DomainRoutes'; -import { useIsNestedDomainsEnabled } from './useAppConfig'; +import { useBusinessAttributesFlag, useIsAppConfigContextLoaded, useIsNestedDomainsEnabled } from './useAppConfig'; import { ManageDomainsPage } from './domain/ManageDomainsPage'; - +import { BusinessAttributes } from './businessAttribute/BusinessAttributes'; /** * Container for all searchable page routes */ @@ -25,6 +25,9 @@ export const SearchRoutes = (): JSX.Element => { ? 
entityRegistry.getEntitiesForSearchRoutes() : entityRegistry.getNonGlossaryEntities(); + const businessAttributesFlag = useBusinessAttributesFlag(); + const appConfigContextLoaded = useIsAppConfigContextLoaded(); + return ( @@ -50,6 +53,18 @@ export const SearchRoutes = (): JSX.Element => { } /> } /> } /> + { + if (!appConfigContextLoaded) { + return null; + } + if (businessAttributesFlag) { + return ; + } + return ; + }} + /> diff --git a/datahub-web-react/src/app/__tests__/Routes.test.tsx b/datahub-web-react/src/app/__tests__/Routes.test.tsx new file mode 100644 index 00000000000000..b1f82ef46bcf0d --- /dev/null +++ b/datahub-web-react/src/app/__tests__/Routes.test.tsx @@ -0,0 +1,18 @@ +import React from 'react'; +import { render, waitFor } from '@testing-library/react'; +import { MockedProvider } from '@apollo/client/testing'; +import { mocks } from '../../Mocks'; +import TestPageContainer from '../../utils/test-utils/TestPageContainer'; +import { Routes } from '../Routes'; + +test('renders embed page properly', async () => { + const { getByText } = render( + + + + + , + ); + + await waitFor(() => expect(getByText('Yet Another Dataset')).toBeInTheDocument()); +}); diff --git a/datahub-web-react/src/app/analytics/README.md b/datahub-web-react/src/app/analytics/README.md index 79b82bcc2a7569..881fffd59fb2c3 100644 --- a/datahub-web-react/src/app/analytics/README.md +++ b/datahub-web-react/src/app/analytics/README.md @@ -48,20 +48,17 @@ const config: any = { ### Google Analytics -**Disclaimers** - -- This plugin requires use of Universal Analytics and does not yet support GA4. To create a Universal Analytics Property, follow [this guide](https://www.analyticsmania.com/other-posts/how-to-create-a-universal-analytics-property/). -- Google Analytics lacks robust support for custom event properties. For that reason many of the DataHub events discussed above will not be fully populated. 
Instead, we map certain fields of the DataHub event to the standard `category`, `action`, `label` fields required by GA. - 1. Open `datahub-web-react/src/conf/analytics.ts` -2. Uncomment the `googleAnalytics` field within the `config` object. -3. Replace the sample `trackingId` with the one provided by Google Analytics. +2. Uncomment the `googleAnalytics` field within the `config`. +3. Replace the sample `measurementIds` with the one provided by Google Analytics. 4. Rebuild & redeploy `datahub-frontend-react` to start tracking. +Example: + ```typescript const config: any = { googleAnalytics: { - trackingId: 'UA-24123123-01', + measurementIds: ['G-ATV123'], }, }; ``` diff --git a/datahub-web-react/src/app/analytics/analytics.ts b/datahub-web-react/src/app/analytics/analytics.ts index a66d76a09cf4de..c4c1b473be05c9 100644 --- a/datahub-web-react/src/app/analytics/analytics.ts +++ b/datahub-web-react/src/app/analytics/analytics.ts @@ -14,7 +14,7 @@ const analytics = Analytics({ plugins: plugins.filter((plugin) => plugin.isEnabled).map((plugin) => plugin.plugin), }); -const { NODE_ENV } = process.env; +const { NODE_ENV } = import.meta.env; export function getMergedTrackingOptions(options?: any) { const isThirdPartyLoggingEnabled = JSON.parse(localStorage.getItem(THIRD_PARTY_LOGGING_KEY) || 'false'); @@ -30,16 +30,17 @@ export function getMergedTrackingOptions(options?: any) { export default { page: (data?: PageData, options?: any, callback?: (...params: any[]) => any) => { + const actorUrn = Cookies.get(CLIENT_AUTH_COOKIE) || undefined; const modifiedData = { ...data, type: EventType[EventType.PageViewEvent], - actorUrn: Cookies.get(CLIENT_AUTH_COOKIE) || undefined, + actorUrn, timestamp: Date.now(), date: new Date().toString(), userAgent: navigator.userAgent, browserId: getBrowserId(), }; - if (NODE_ENV === 'test') { + if (NODE_ENV === 'test' || !actorUrn) { return null; } const trackingOptions = getMergedTrackingOptions(options); diff --git 
a/datahub-web-react/src/app/analytics/event.ts b/datahub-web-react/src/app/analytics/event.ts index 27340264009336..d63b731c720426 100644 --- a/datahub-web-react/src/app/analytics/event.ts +++ b/datahub-web-react/src/app/analytics/event.ts @@ -48,6 +48,7 @@ export enum EventType { CreateResetCredentialsLinkEvent, DeleteEntityEvent, SelectUserRoleEvent, + SelectGroupRoleEvent, BatchSelectUserRoleEvent, CreatePolicyEvent, UpdatePolicyEvent, @@ -80,6 +81,7 @@ export enum EventType { EmbedProfileViewEvent, EmbedProfileViewInDataHubEvent, EmbedLookupNotFoundEvent, + CreateBusinessAttributeEvent, } /** @@ -302,6 +304,8 @@ export const EntityActionType = { UpdateSchemaTags: 'UpdateSchemaTags', UpdateSchemaTerms: 'UpdateSchemaTerms', ClickExternalUrl: 'ClickExternalUrl', + AddIncident: 'AddIncident', + ResolvedIncident: 'ResolvedIncident', }; export interface EntityActionEvent extends BaseEvent { type: EventType.EntityActionEvent; @@ -412,6 +416,12 @@ export interface SelectUserRoleEvent extends BaseEvent { userUrn: string; } +export interface SelectGroupRoleEvent extends BaseEvent { + type: EventType.SelectGroupRoleEvent; + roleUrn: string; + groupUrn?: string; +} + export interface BatchSelectUserRoleEvent extends BaseEvent { type: EventType.BatchSelectUserRoleEvent; roleUrn: string; @@ -624,6 +634,11 @@ export interface EmbedLookupNotFoundEvent extends BaseEvent { reason: EmbedLookupNotFoundReason; } +export interface CreateBusinessAttributeEvent extends BaseEvent { + type: EventType.CreateBusinessAttributeEvent; + name: string; +} + /** * Event consisting of a union of specific event types. 
*/ @@ -668,6 +683,7 @@ export type Event = | CreateResetCredentialsLinkEvent | DeleteEntityEvent | SelectUserRoleEvent + | SelectGroupRoleEvent | BatchSelectUserRoleEvent | CreatePolicyEvent | UpdatePolicyEvent @@ -700,4 +716,5 @@ export type Event = | DeselectQuickFilterEvent | EmbedProfileViewEvent | EmbedProfileViewInDataHubEvent - | EmbedLookupNotFoundEvent; + | EmbedLookupNotFoundEvent + | CreateBusinessAttributeEvent; diff --git a/datahub-web-react/src/app/analytics/plugin/googleAnalytics.ts b/datahub-web-react/src/app/analytics/plugin/googleAnalytics.ts index f60f46513272b0..727258ee8d40f0 100644 --- a/datahub-web-react/src/app/analytics/plugin/googleAnalytics.ts +++ b/datahub-web-react/src/app/analytics/plugin/googleAnalytics.ts @@ -2,9 +2,9 @@ import googleAnalytics from '@analytics/google-analytics'; import { Event, EventType } from '../event'; import analyticsConfig from '../../../conf/analytics'; -const gaConfigs = analyticsConfig.googleAnalytics; -const isEnabled: boolean = gaConfigs || false; -const trackingId = isEnabled ? gaConfigs.trackingId : undefined; +const ga4Configs = analyticsConfig.googleAnalytics; +const isEnabled: boolean = ga4Configs || false; +const measurementIds = isEnabled ? 
ga4Configs.measurementIds : undefined; const getLabelFromEvent = (event: Event) => { switch (event.type) { @@ -21,11 +21,7 @@ const getLabelFromEvent = (event: Event) => { let wrappedGoogleAnalyticsPlugin; if (isEnabled) { - /** - * Init default GA plugin - */ - const googleAnalyticsPlugin = googleAnalytics({ trackingId }); - + const googleAnalyticsPlugin = googleAnalytics({ measurementIds }); /** * Lightweight wrapper on top of the default google analytics plugin * to transform DataHub Analytics Events into the Google Analytics event diff --git a/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx b/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx index f178b09afcccc9..0c9909313e27ec 100644 --- a/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx +++ b/datahub-web-react/src/app/analyticsDashboard/components/BarChart.tsx @@ -1,8 +1,8 @@ import React, { useMemo } from 'react'; -import { BarStack } from '@vx/shape'; -import { scaleOrdinal, scaleLinear, scaleBand } from '@vx/scale'; -import { Group } from '@vx/group'; -import { AxisBottom, AxisRight } from '@vx/axis'; +import { BarStack } from '@visx/shape'; +import { scaleOrdinal, scaleLinear, scaleBand } from '@visx/scale'; +import { Group } from '@visx/group'; +import { AxisBottom, AxisRight } from '@visx/axis'; import { BarChart as BarChartType } from '../../../types.generated'; import { lineColors } from './lineColors'; @@ -85,7 +85,7 @@ export const BarChart = ({ chartData, width, height }: Props) => { - + data={transformedChartData} keys={keys} x={(data) => data.displayName} diff --git a/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx b/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx index c4ef51fb3938b3..7796713961ca3b 100644 --- a/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx +++ b/datahub-web-react/src/app/analyticsDashboard/components/Legend.tsx @@ -1,6 +1,6 @@ import React from 'react'; import 
{ Col, Row } from 'antd'; -import { LegendOrdinal, LegendItem, LegendLabel } from '@vx/legend'; +import { LegendOrdinal, LegendItem, LegendLabel } from '@visx/legend'; import { ScaleOrdinal } from 'd3-scale/src/ordinal'; import styled from 'styled-components'; diff --git a/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx b/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx index 6b9b808abfd0f3..68851a950bcc55 100644 --- a/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx +++ b/datahub-web-react/src/app/analyticsDashboard/components/TimeSeriesChart.tsx @@ -1,17 +1,15 @@ import React, { useMemo } from 'react'; -import { XYChart, LineSeries, CrossHair, XAxis, YAxis } from '@data-ui/xy-chart'; -import { scaleOrdinal } from '@vx/scale'; +import styled from 'styled-components'; +import { AxisScaleOutput } from '@visx/axis'; +import { Axis, LineSeries, XYChart, Tooltip, GlyphSeries } from '@visx/xychart'; +import { curveMonotoneX } from '@visx/curve'; +import { ScaleConfig, scaleOrdinal } from '@visx/scale'; import { TimeSeriesChart as TimeSeriesChartType, NumericDataPoint, NamedLine } from '../../../types.generated'; import { lineColors } from './lineColors'; import Legend from './Legend'; import { addInterval } from '../../shared/time/timeUtils'; import { formatNumber } from '../../shared/formatNumber'; -type ScaleConfig = { - type: 'time' | 'timeUtc' | 'linear' | 'band' | 'ordinal'; - includeZero?: boolean; -}; - type AxisConfig = { formatter: (tick: number) => string; }; @@ -29,10 +27,15 @@ type Props = { crossHairLineColor?: string; }; insertBlankPoints?: boolean; - yScale?: ScaleConfig; + yScale?: ScaleConfig; yAxis?: AxisConfig; }; +const StyledTooltip = styled(Tooltip)` + font-family: inherit !important; + font-weight: 400 !important; +`; + const MARGIN = { TOP: 40, RIGHT: 45, @@ -40,6 +43,11 @@ const MARGIN = { LEFT: 40, }; +const accessors = { + xAccessor: (d) => d.x, + yAccessor: (d) 
=> d.y, +}; + function insertBlankAt(ts: number, newLine: Array) { const dateString = new Date(ts).toISOString(); for (let i = 0; i < newLine.length; i++) { @@ -96,41 +104,61 @@ export const TimeSeriesChart = ({ return ( <> ( -
-
{new Date(Number(datum.x)).toDateString()}
-
{datum.y}
-
- )} - snapTooltipToDataX={false} + yScale={yScale ?? { type: 'linear' }} > - - + (yAxis?.formatter ? yAxis.formatter(tick) : formatNumber(tick))} + tickLabelProps={{ fill: 'black', fontFamily: 'inherit', fontSize: 10 }} + numTicks={3} /> {lines.map((line, i) => ( - ({ x: new Date(point.x).getTime().toString(), y: point.y }))} - stroke={(style && style.lineColor) || lineColors[i]} - /> + <> + ({ x: new Date(point.x), y: point.y }))} + stroke={(style && style.lineColor) || lineColors[i]} + curve={curveMonotoneX} + {...accessors} + /> + ({ x: new Date(point.x), y: point.y }))} + {...accessors} + /> + ))} - + tooltipData?.nearestDatum && ( +
+
+ {new Date( + Number(accessors.xAccessor(tooltipData.nearestDatum.datum)), + ).toDateString()} +
+
{accessors.yAccessor(tooltipData.nearestDatum.datum)}
+
+ ) + } /> {!hideLegend && } diff --git a/datahub-web-react/src/app/analyticsDashboard/components/__tests__/timeSeriesChart.test.tsx b/datahub-web-react/src/app/analyticsDashboard/components/__tests__/timeSeriesChart.test.tsx index c6f458301707d6..c528e4e627a1c5 100644 --- a/datahub-web-react/src/app/analyticsDashboard/components/__tests__/timeSeriesChart.test.tsx +++ b/datahub-web-react/src/app/analyticsDashboard/components/__tests__/timeSeriesChart.test.tsx @@ -1,9 +1,10 @@ +import { DateInterval, TimeSeriesChart } from '../../../../types.generated'; import { computeLines } from '../TimeSeriesChart'; describe('timeSeriesChart', () => { describe('computeLines', () => { it('compute lines works works correctly for weekly case', () => { - const chartData = { + const chartData: TimeSeriesChart = { title: 'Weekly Active Users', lines: [ { @@ -15,7 +16,7 @@ describe('timeSeriesChart', () => { start: '1672012800000', end: '1677369600000', }, - interval: 'WEEK', + interval: DateInterval.Week, }; const result = computeLines(chartData, true); expect(result[0]).toEqual({ @@ -35,7 +36,7 @@ describe('timeSeriesChart', () => { }); it('compute lines works works correctly for monthly case', () => { - const chartData = { + const chartData: TimeSeriesChart = { title: 'Weekly Active Users', lines: [ { @@ -51,7 +52,7 @@ describe('timeSeriesChart', () => { start: '1648771200000', end: '1680307199999', }, - interval: 'MONTH', + interval: DateInterval.Month, }; const result = computeLines(chartData, true); expect(result[0]).toEqual({ diff --git a/datahub-web-react/src/app/auth/ResetCredentials.tsx b/datahub-web-react/src/app/auth/ResetCredentials.tsx index 30d7f99d99d842..77f41489fcfc9e 100644 --- a/datahub-web-react/src/app/auth/ResetCredentials.tsx +++ b/datahub-web-react/src/app/auth/ResetCredentials.tsx @@ -41,7 +41,9 @@ const FormInput = styled(Input)` `; const StyledFormItem = styled(Form.Item)` - 
.ant-input-affix-wrapper-status-error:not(.ant-input-affix-wrapper-disabled):not(.ant-input-affix-wrapper-borderless).ant-input-affix-wrapper { + .ant-input-affix-wrapper-status-error:not(.ant-input-affix-wrapper-disabled):not( + .ant-input-affix-wrapper-borderless + ).ant-input-affix-wrapper { background-color: transparent; } `; diff --git a/datahub-web-react/src/app/auth/SignUp.tsx b/datahub-web-react/src/app/auth/SignUp.tsx index e57a5930ce1ff9..2eaa74946682fa 100644 --- a/datahub-web-react/src/app/auth/SignUp.tsx +++ b/datahub-web-react/src/app/auth/SignUp.tsx @@ -55,7 +55,9 @@ const TitleSelector = styled(Select)` `; const StyledFormItem = styled(Form.Item)` - .ant-input-affix-wrapper-status-error:not(.ant-input-affix-wrapper-disabled):not(.ant-input-affix-wrapper-borderless).ant-input-affix-wrapper { + .ant-input-affix-wrapper-status-error:not(.ant-input-affix-wrapper-disabled):not( + .ant-input-affix-wrapper-borderless + ).ant-input-affix-wrapper { background-color: transparent; } `; diff --git a/datahub-web-react/src/app/auth/login.module.css b/datahub-web-react/src/app/auth/login.module.css index 37cc067c9dd203..81b933062a1a74 100644 --- a/datahub-web-react/src/app/auth/login.module.css +++ b/datahub-web-react/src/app/auth/login.module.css @@ -8,7 +8,7 @@ position: absolute; top: 40%; left: 50%; - transform: translate(-50%,-50%); + transform: translate(-50%, -50%); } .login_logo_box { @@ -28,7 +28,7 @@ .login_form_box { width: 100%; - background-color: #1C1C1C; + background-color: #1c1c1c; border: 1px solid #555555; border-radius: 5px; padding: 2em; @@ -36,8 +36,8 @@ .login_button { color: #171717; - background-color: #EEEEEE; - border: 1px solid #555555; + background-color: #eeeeee; + border: 1px solid #555555; height: 40px; font-size: 14px; } @@ -45,11 +45,11 @@ .login_button:hover { color: white; background-color: transparent; - border: 1px solid #555555; + border: 1px solid #555555; } .sso_button { - color: #EEEEEE; + color: #eeeeee; background-color: 
#171717; border: 1px solid #555555; height: 40px; @@ -60,4 +60,4 @@ color: black; background-color: white; border: 1px solid #555555; -} \ No newline at end of file +} diff --git a/datahub-web-react/src/app/buildEntityRegistry.ts b/datahub-web-react/src/app/buildEntityRegistry.ts new file mode 100644 index 00000000000000..0b70986672be51 --- /dev/null +++ b/datahub-web-react/src/app/buildEntityRegistry.ts @@ -0,0 +1,56 @@ +import EntityRegistry from './entity/EntityRegistry'; +import { DashboardEntity } from './entity/dashboard/DashboardEntity'; +import { ChartEntity } from './entity/chart/ChartEntity'; +import { UserEntity } from './entity/user/User'; +import { GroupEntity } from './entity/group/Group'; +import { DatasetEntity } from './entity/dataset/DatasetEntity'; +import { DataFlowEntity } from './entity/dataFlow/DataFlowEntity'; +import { DataJobEntity } from './entity/dataJob/DataJobEntity'; +import { TagEntity } from './entity/tag/Tag'; +import { GlossaryTermEntity } from './entity/glossaryTerm/GlossaryTermEntity'; +import { MLFeatureEntity } from './entity/mlFeature/MLFeatureEntity'; +import { MLPrimaryKeyEntity } from './entity/mlPrimaryKey/MLPrimaryKeyEntity'; +import { MLFeatureTableEntity } from './entity/mlFeatureTable/MLFeatureTableEntity'; +import { MLModelEntity } from './entity/mlModel/MLModelEntity'; +import { MLModelGroupEntity } from './entity/mlModelGroup/MLModelGroupEntity'; +import { DomainEntity } from './entity/domain/DomainEntity'; +import { ContainerEntity } from './entity/container/ContainerEntity'; +import GlossaryNodeEntity from './entity/glossaryNode/GlossaryNodeEntity'; +import { DataPlatformEntity } from './entity/dataPlatform/DataPlatformEntity'; +import { DataProductEntity } from './entity/dataProduct/DataProductEntity'; +import { DataPlatformInstanceEntity } from './entity/dataPlatformInstance/DataPlatformInstanceEntity'; +import { ERModelRelationshipEntity } from './entity/ermodelrelationships/ERModelRelationshipEntity'; +import 
{ RoleEntity } from './entity/Access/RoleEntity'; +import { RestrictedEntity } from './entity/restricted/RestrictedEntity'; +import { BusinessAttributeEntity } from './entity/businessAttribute/BusinessAttributeEntity'; +import { SchemaFieldPropertiesEntity } from './entity/schemaField/SchemaFieldPropertiesEntity'; + +export default function buildEntityRegistry() { + const registry = new EntityRegistry(); + registry.register(new DatasetEntity()); + registry.register(new DashboardEntity()); + registry.register(new ChartEntity()); + registry.register(new UserEntity()); + registry.register(new GroupEntity()); + registry.register(new TagEntity()); + registry.register(new DataFlowEntity()); + registry.register(new DataJobEntity()); + registry.register(new GlossaryTermEntity()); + registry.register(new MLFeatureEntity()); + registry.register(new MLPrimaryKeyEntity()); + registry.register(new MLFeatureTableEntity()); + registry.register(new MLModelEntity()); + registry.register(new MLModelGroupEntity()); + registry.register(new DomainEntity()); + registry.register(new ContainerEntity()); + registry.register(new GlossaryNodeEntity()); + registry.register(new RoleEntity()); + registry.register(new DataPlatformEntity()); + registry.register(new DataProductEntity()); + registry.register(new DataPlatformInstanceEntity()); + registry.register(new ERModelRelationshipEntity()); + registry.register(new RestrictedEntity()); + registry.register(new BusinessAttributeEntity()); + registry.register(new SchemaFieldPropertiesEntity()); + return registry; +} diff --git a/datahub-web-react/src/app/businessAttribute/AttributeBrowser.tsx b/datahub-web-react/src/app/businessAttribute/AttributeBrowser.tsx new file mode 100644 index 00000000000000..4d8f722aec9883 --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/AttributeBrowser.tsx @@ -0,0 +1,63 @@ +import React, { useEffect } from 'react'; +import styled from 'styled-components/macro'; +import { useEntityRegistry } from 
'../useEntityRegistry'; +import { ListBusinessAttributesQuery, useListBusinessAttributesQuery } from '../../graphql/businessAttribute.generated'; +import { sortBusinessAttributes } from './businessAttributeUtils'; +import AttributeItem from './AttributeItem'; + +const BrowserWrapper = styled.div` + color: #262626; + font-size: 12px; + max-height: calc(100% - 47px); + padding: 10px 20px 20px 20px; + overflow: auto; +`; + +interface Props { + isSelecting?: boolean; + hideTerms?: boolean; + refreshBrowser?: boolean; + selectAttribute?: (urn: string, displayName: string) => void; + attributeData?: ListBusinessAttributesQuery; +} + +function AttributeBrowser(props: Props) { + const { isSelecting, hideTerms, refreshBrowser, selectAttribute, attributeData } = props; + + const { refetch: refetchAttributes } = useListBusinessAttributesQuery({ + variables: { + start: 0, + count: 10, + query: '*', + }, + }); + + const displayedAttributes = attributeData?.listBusinessAttributes?.businessAttributes || []; + + const entityRegistry = useEntityRegistry(); + const sortedAttributes = displayedAttributes.sort((termA, termB) => + sortBusinessAttributes(entityRegistry, termA, termB), + ); + + useEffect(() => { + if (refreshBrowser) { + refetchAttributes(); + } + }, [refreshBrowser, refetchAttributes]); + + return ( + + {!hideTerms && + sortedAttributes.map((attribute) => ( + + ))} + + ); +} + +export default AttributeBrowser; diff --git a/datahub-web-react/src/app/businessAttribute/AttributeItem.tsx b/datahub-web-react/src/app/businessAttribute/AttributeItem.tsx new file mode 100644 index 00000000000000..051979d696f493 --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/AttributeItem.tsx @@ -0,0 +1,61 @@ +import React from 'react'; +import styled from 'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import { useEntityRegistry } from '../useEntityRegistry'; + +const AttributeWrapper = styled.div` + font-weight: normal; + margin-bottom: 
4px; +`; + +const nameStyles = ` + color: #262626; + display: inline-block; + height: 100%; + padding: 3px 4px; + width: 100%; +`; + +export const NameWrapper = styled.span<{ showSelectStyles?: boolean }>` + ${nameStyles} + + &:hover { + ${(props) => + props.showSelectStyles && + ` + background-color: ${ANTD_GRAY[3]}; + cursor: pointer; + `} + } +`; + +interface Props { + attribute: any; + isSelecting?: boolean; + selectAttribute?: (urn: string, displayName: string) => void; +} + +function AttributeItem(props: Props) { + const { attribute, isSelecting, selectAttribute } = props; + + const entityRegistry = useEntityRegistry(); + + function handleSelectAttribute() { + if (selectAttribute) { + const displayName = entityRegistry.getDisplayName(attribute.type, attribute); + selectAttribute(attribute.urn, displayName); + } + } + + return ( + + {isSelecting && ( + + {entityRegistry.getDisplayName(attribute.type, attribute)} + + )} + + ); +} + +export default AttributeItem; diff --git a/datahub-web-react/src/app/businessAttribute/BusinessAttributeItemMenu.tsx b/datahub-web-react/src/app/businessAttribute/BusinessAttributeItemMenu.tsx new file mode 100644 index 00000000000000..4e56d81203b6f5 --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/BusinessAttributeItemMenu.tsx @@ -0,0 +1,65 @@ +import React from 'react'; +import { DeleteOutlined } from '@ant-design/icons'; +import { Dropdown, Menu, message, Modal } from 'antd'; +import { MenuIcon } from '../entity/shared/EntityDropdown/EntityDropdown'; +import { useDeleteBusinessAttributeMutation } from '../../graphql/businessAttribute.generated'; + +type Props = { + urn: string; + title: string | undefined; + onDelete?: () => void; +}; + +export default function BusinessAttributeItemMenu({ title, urn, onDelete }: Props) { + const [deleteBusinessAttributeMutation] = useDeleteBusinessAttributeMutation(); + + const deletePost = () => { + deleteBusinessAttributeMutation({ + variables: { + urn, + }, + }) + .then(({ 
errors }) => { + if (!errors) { + message.success('Deleted Business Attribute!'); + onDelete?.(); + } + }) + .catch(() => { + message.destroy(); + message.error({ + content: `Failed to delete Business Attribute!: An unknown error occurred.`, + duration: 3, + }); + }); + }; + + const onConfirmDelete = () => { + Modal.confirm({ + title: `Delete Business Attribute '${title}'`, + content: `Are you sure you want to remove this Business Attribute?`, + onOk() { + deletePost(); + }, + onCancel() {}, + okText: 'Yes', + maskClosable: true, + closable: true, + }); + }; + + return ( + + +  Delete + + + } + > + + + ); +} diff --git a/datahub-web-react/src/app/businessAttribute/BusinessAttributes.tsx b/datahub-web-react/src/app/businessAttribute/BusinessAttributes.tsx new file mode 100644 index 00000000000000..b16593f5497f6e --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/BusinessAttributes.tsx @@ -0,0 +1,257 @@ +import React, { useState, useMemo } from 'react'; +import styled from 'styled-components'; +import { Button, Empty, message, Pagination, Typography } from 'antd'; +import { PlusOutlined } from '@ant-design/icons'; +import { AlignType } from 'rc-table/lib/interface'; +import { Link } from 'react-router-dom'; +import { useListBusinessAttributesQuery } from '../../graphql/businessAttribute.generated'; +import { Message } from '../shared/Message'; +import TabToolbar from '../entity/shared/components/styled/TabToolbar'; +import { StyledTable } from '../entity/shared/components/styled/StyledTable'; +import CreateBusinessAttributeModal from './CreateBusinessAttributeModal'; +import { scrollToTop } from '../shared/searchUtils'; +import { useUserContext } from '../context/useUserContext'; +import { BusinessAttribute } from '../../types.generated'; +import { SearchBar } from '../search/SearchBar'; +import { useEntityRegistry } from '../useEntityRegistry'; +import useTagsAndTermsRenderer from './utils/useTagsAndTermsRenderer'; +import useDescriptionRenderer from 
'./utils/useDescriptionRenderer'; +import BusinessAttributeItemMenu from './BusinessAttributeItemMenu'; + +function BusinessAttributeListMenuColumn(handleDelete: () => void) { + return (record: BusinessAttribute) => ( + handleDelete()} /> + ); +} + +const SourceContainer = styled.div` + width: 100%; + padding-top: 20px; + padding-right: 40px; + padding-left: 40px; + display: flex; + flex-direction: column; + overflow: auto; +`; + +const BusinessAttributesContainer = styled.div` + padding-top: 0px; +`; + +const BusinessAttributeHeaderContainer = styled.div` + && { + padding-left: 0px; + } +`; + +const BusinessAttributeTitle = styled(Typography.Title)` + && { + margin-bottom: 8px; + } +`; + +const PaginationContainer = styled.div` + display: flex; + justify-content: center; +`; + +const searchBarStyle = { + maxWidth: 220, + padding: 0, +}; + +const searchBarInputStyle = { + height: 32, + fontSize: 12, +}; + +const DEFAULT_PAGE_SIZE = 10; + +export const BusinessAttributes = () => { + const [isCreatingBusinessAttribute, setIsCreatingBusinessAttribute] = useState(false); + const entityRegistry = useEntityRegistry(); + + // Current User Urn + const authenticatedUser = useUserContext(); + + const canCreateBusinessAttributes = authenticatedUser?.platformPrivileges?.createBusinessAttributes; + const [page, setPage] = useState(1); + const pageSize = DEFAULT_PAGE_SIZE; + const start = (page - 1) * pageSize; + const [query, setQuery] = useState(undefined); + const [tagHoveredUrn, setTagHoveredUrn] = useState(undefined); + + const { + loading: businessAttributeLoading, + error: businessAttributeError, + data: businessAttributeData, + refetch: businessAttributeRefetch, + } = useListBusinessAttributesQuery({ + variables: { + start, + count: pageSize, + query, + }, + }); + const descriptionRender = useDescriptionRenderer(businessAttributeRefetch); + const tagRenderer = useTagsAndTermsRenderer( + tagHoveredUrn, + setTagHoveredUrn, + { + showTags: true, + showTerms: false, + }, + 
query || '', + businessAttributeRefetch, + ); + + const termRenderer = useTagsAndTermsRenderer( + tagHoveredUrn, + setTagHoveredUrn, + { + showTags: false, + showTerms: true, + }, + query || '', + businessAttributeRefetch, + ); + + const totalBusinessAttributes = businessAttributeData?.listBusinessAttributes?.total || 0; + const businessAttributes = useMemo( + () => (businessAttributeData?.listBusinessAttributes?.businessAttributes || []) as BusinessAttribute[], + [businessAttributeData], + ); + + const onTagTermCell = (record: BusinessAttribute) => ({ + onMouseEnter: () => { + setTagHoveredUrn(record.urn); + }, + onMouseLeave: () => { + setTagHoveredUrn(undefined); + }, + }); + + const handleDelete = () => { + setTimeout(() => { + businessAttributeRefetch?.(); + }, 2000); + }; + const tableData = businessAttributes || []; + const tableColumns = [ + { + width: '20%', + title: 'Name', + dataIndex: ['properties', 'name'], + key: 'name', + render: (name: string, record: any) => ( + {name} + ), + }, + { + title: 'Description', + dataIndex: ['properties', 'description'], + key: 'description', + width: '20%', + // render: (description: string) => description || '', + render: descriptionRender, + }, + { + width: '20%', + title: 'Tags', + dataIndex: ['properties', 'tags'], + key: 'tags', + render: tagRenderer, + onCell: onTagTermCell, + }, + { + width: '20%', + title: 'Glossary Terms', + dataIndex: ['properties', 'glossaryTags'], + key: 'glossaryTags', + render: termRenderer, + onCell: onTagTermCell, + }, + { + width: '13%', + title: 'Data Type', + dataIndex: ['properties', 'businessAttributeDataType'], + key: 'businessAttributeDataType', + render: (dataType: string) => dataType || '', + }, + { + title: '', + dataIndex: '', + width: '5%', + align: 'right' as AlignType, + key: 'menu', + render: BusinessAttributeListMenuColumn(handleDelete), + }, + ]; + + const onChangePage = (newPage: number) => { + scrollToTop(); + setPage(newPage); + }; + + return ( + + 
{businessAttributeLoading && !businessAttributeData && ( + + )} + {businessAttributeError && message.error('Failed to load businessAttributes :(')} + + + Business Attribute + View your Business Attributes + + + + + null} + onQueryChange={(q) => setQuery(q.length > 0 ? q : undefined)} + entityRegistry={entityRegistry} + /> + + , + }} + pagination={false} + /> + + + + setIsCreatingBusinessAttribute(false)} + onCreateBusinessAttribute={() => { + businessAttributeRefetch?.(); + }} + /> + + ); +}; diff --git a/datahub-web-react/src/app/businessAttribute/CreateBusinessAttributeModal.tsx b/datahub-web-react/src/app/businessAttribute/CreateBusinessAttributeModal.tsx new file mode 100644 index 00000000000000..1ee0ca030748ef --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/CreateBusinessAttributeModal.tsx @@ -0,0 +1,255 @@ +import React, { useState } from 'react'; +import { message, Button, Input, Modal, Typography, Form, Select, Collapse } from 'antd'; +import styled from 'styled-components'; +import { EditOutlined } from '@ant-design/icons'; +import DOMPurify from 'dompurify'; +import { useEnterKeyListener } from '../shared/useEnterKeyListener'; +import { useCreateBusinessAttributeMutation } from '../../graphql/businessAttribute.generated'; +import { CreateBusinessAttributeInput, EntityType } from '../../types.generated'; +import analytics, { EventType } from '../analytics'; +import { useEntityRegistry } from '../useEntityRegistry'; +import DescriptionModal from '../entity/shared/components/legacy/DescriptionModal'; +import { SchemaFieldDataType } from './businessAttributeUtils'; +import { validateCustomUrnId } from '../shared/textUtil'; + +type Props = { + visible: boolean; + onClose: () => void; + onCreateBusinessAttribute: () => void; +}; + +type FormProps = { + name: string; + description?: string; + dataType?: SchemaFieldDataType; +}; + +const DataTypeSelectContainer = styled.div` + padding: 1px; +`; + +const DataTypeSelect = styled(Select)` + && { + 
width: 100%; + margin-top: 1em; + margin-bottom: 1em; + } +`; + +const StyledItem = styled(Form.Item)` + margin-bottom: 0; +`; + +const OptionalWrapper = styled.span` + font-weight: normal; +`; + +const StyledButton = styled(Button)` + padding: 0; +`; + +// Ensures that any newly added datatype is automatically included in the user dropdown. +const DATA_TYPES = Object.values(SchemaFieldDataType); + +export default function CreateBusinessAttributeModal({ visible, onClose, onCreateBusinessAttribute }: Props) { + const [createButtonEnabled, setCreateButtonEnabled] = useState(true); + + const [createBusinessAttribute] = useCreateBusinessAttributeMutation(); + + const [isDocumentationModalVisible, setIsDocumentationModalVisible] = useState(false); + + const [documentation, setDocumentation] = useState(''); + + const [form] = Form.useForm(); + + const entityRegistry = useEntityRegistry(); + + const [stagedId, setStagedId] = useState(undefined); + + // Function to handle the close or cross button of Create Business Attribute Modal + const onModalClose = () => { + form.resetFields(); + onClose(); + }; + + const onCreateNewBusinessAttribute = () => { + const { name, dataType } = form.getFieldsValue(); + const sanitizedDescription = DOMPurify.sanitize(documentation); + const input: CreateBusinessAttributeInput = { + id: stagedId?.length ? 
stagedId : undefined, + name, + description: sanitizedDescription, + type: dataType, + }; + createBusinessAttribute({ variables: { input } }) + .then(() => { + message.loading({ content: 'Updating...', duration: 2 }); + setTimeout(() => { + analytics.event({ + type: EventType.CreateBusinessAttributeEvent, + name, + }); + message.success({ + content: `Created ${entityRegistry.getEntityName(EntityType.BusinessAttribute)}!`, + duration: 2, + }); + if (onCreateBusinessAttribute) { + onCreateBusinessAttribute(); + } + }, 2000); + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to create: \n ${e.message || ''}`, duration: 3 }); + }); + onModalClose(); + setDocumentation(''); + }; + + // Handle the Enter press + useEnterKeyListener({ + querySelectorToExecuteClick: '#createBusinessAttributeButton', + }); + + function addDocumentation(description: string) { + setDocumentation(description); + setIsDocumentationModalVisible(false); + } + + return ( + <> + + + + + } + > + + setCreateButtonEnabled(form.getFieldsError().some((field) => field.errors.length > 0)) + } + > + Name}> + + + + + + Data Type}> + + + {DATA_TYPES.map((dataType: SchemaFieldDataType) => ( + + {dataType} + + ))} + + + + + + Documentation (optional) + + } + > + setIsDocumentationModalVisible(true)}> + + {documentation ? 'Edit' : 'Add'} Documentation + + {isDocumentationModalVisible && ( + setIsDocumentationModalVisible(false)} + onSubmit={addDocumentation} + description={documentation} + /> + )} + + + Advanced} key="1"> + + {entityRegistry.getEntityName(EntityType.BusinessAttribute)} Id + + } + > + + By default, a random UUID will be generated to uniquely identify this entity. If + you'd like to provide a custom id, you may provide it here. Note that it should + be unique across the entire Business Attributes. Be careful, you cannot easily + change the id after creation. 
+ + ({ + validator(_, value) { + if (value && validateCustomUrnId(value)) { + return Promise.resolve(); + } + return Promise.reject(new Error('Please enter a valid entity id')); + }, + }), + ]} + > + setStagedId(event.target.value)} + /> + + + + + + + + ); +} diff --git a/datahub-web-react/src/app/businessAttribute/businessAttributeUtils.ts b/datahub-web-react/src/app/businessAttribute/businessAttributeUtils.ts new file mode 100644 index 00000000000000..ec8c44d79901c3 --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/businessAttributeUtils.ts @@ -0,0 +1,37 @@ +import EntityRegistry from '../entity/EntityRegistry'; +import { Entity, EntityType } from '../../types.generated'; + +export function sortBusinessAttributes(entityRegistry: EntityRegistry, nodeA?: Entity | null, nodeB?: Entity | null) { + const nodeAName = entityRegistry.getDisplayName(EntityType.BusinessAttribute, nodeA) || ''; + const nodeBName = entityRegistry.getDisplayName(EntityType.BusinessAttribute, nodeB) || ''; + return nodeAName.localeCompare(nodeBName); +} + +export function getRelatedEntitiesUrl(entityRegistry: EntityRegistry, urn: string) { + return `${entityRegistry.getEntityUrl(EntityType.BusinessAttribute, urn)}/${encodeURIComponent( + 'Related Entities', + )}`; +} + +export enum SchemaFieldDataType { + /** A boolean type */ + Boolean = 'BOOLEAN', + /** A fixed bytestring type */ + Fixed = 'FIXED', + /** A string type */ + String = 'STRING', + /** A string of bytes */ + Bytes = 'BYTES', + /** A number, including integers, floats, and doubles */ + Number = 'NUMBER', + /** A datestrings type */ + Date = 'DATE', + /** A timestamp type */ + Time = 'TIME', + /** An enum type */ + Enum = 'ENUM', + /** A map collection type */ + Map = 'MAP', + /** An array collection type */ + Array = 'ARRAY', +} diff --git a/datahub-web-react/src/app/businessAttribute/utils/useDescriptionRenderer.tsx b/datahub-web-react/src/app/businessAttribute/utils/useDescriptionRenderer.tsx new file mode 100644 
index 00000000000000..ef665e45aeefdd --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/utils/useDescriptionRenderer.tsx @@ -0,0 +1,41 @@ +import React, { useState } from 'react'; +import DOMPurify from 'dompurify'; +import { BusinessAttribute } from '../../../types.generated'; +import DescriptionField from '../../entity/dataset/profile/schema/components/SchemaDescriptionField'; +import { useUpdateDescriptionMutation } from '../../../graphql/mutations.generated'; + +export default function useDescriptionRenderer(businessAttributeRefetch: () => Promise) { + const [updateDescription] = useUpdateDescriptionMutation(); + const [expandedRows, setExpandedRows] = useState({}); + + const refresh: any = () => { + businessAttributeRefetch?.(); + }; + + return (description: string, record: BusinessAttribute, index: number): JSX.Element => { + const relevantEditableFieldInfo = record?.properties; + const displayedDescription = relevantEditableFieldInfo?.description || description; + const sanitizedDescription = DOMPurify.sanitize(displayedDescription); + + const handleExpandedRows = (expanded) => setExpandedRows((prev) => ({ ...prev, [index]: expanded })); + + return ( + + updateDescription({ + variables: { + input: { + description: DOMPurify.sanitize(updatedDescription), + resourceUrn: record.urn, + }, + }, + }).then(refresh) + } + /> + ); + }; +} +// diff --git a/datahub-web-react/src/app/businessAttribute/utils/useTagsAndTermsRenderer.tsx b/datahub-web-react/src/app/businessAttribute/utils/useTagsAndTermsRenderer.tsx new file mode 100644 index 00000000000000..7c138c99dbd1a8 --- /dev/null +++ b/datahub-web-react/src/app/businessAttribute/utils/useTagsAndTermsRenderer.tsx @@ -0,0 +1,38 @@ +import React from 'react'; +import { EntityType, GlobalTags, BusinessAttribute } from '../../../types.generated'; +import TagTermGroup from '../../shared/tags/TagTermGroup'; + +export default function useTagsAndTermsRenderer( + tagHoveredUrn: string | undefined, + 
setTagHoveredUrn: (index: string | undefined) => void, + options: { showTags: boolean; showTerms: boolean }, + filterText: string, + businessAttributeRefetch: () => Promise, +) { + const urn = tagHoveredUrn; + + const refresh: any = () => { + businessAttributeRefetch?.(); + }; + + const tagAndTermRender = (tags: GlobalTags, record: BusinessAttribute) => { + return ( +
+ setTagHoveredUrn(undefined)} + entityUrn={urn} + entityType={EntityType.BusinessAttribute} + highlightText={filterText} + refetch={refresh} + /> +
+ ); + }; + return tagAndTermRender; +} diff --git a/datahub-web-react/src/app/context/UserContextProvider.tsx b/datahub-web-react/src/app/context/UserContextProvider.tsx index 3bcff15cc27485..66593d346f3df4 100644 --- a/datahub-web-react/src/app/context/UserContextProvider.tsx +++ b/datahub-web-react/src/app/context/UserContextProvider.tsx @@ -127,6 +127,7 @@ const UserContextProvider = ({ children }: { children: React.ReactNode }) => { return ( { window.clearTimeout(timerRef.current); timerRef.current = window.setTimeout(() => { @@ -81,6 +61,26 @@ function DomainSearch() { }, 250); }; + const renderLoadingIndicator = () => ( + + + + ); + + const renderSearchResults = () => ( + + {searchResults?.map((result) => ( + setIsSearchBarFocused(false)} + /> + ))} + + ); + return ( setIsSearchBarFocused(false)}> @@ -102,39 +102,8 @@ function DomainSearch() { entityRegistry={entityRegistry} onFocus={() => setIsSearchBarFocused(true)} /> - {isSearchBarFocused && searchResults && !!searchResults.length && ( - - {searchResults.map((result) => { - return ( - setIsSearchBarFocused(false)} - > - - {result.entity.type === EntityType.Domain ? ( - - ) : ( - entityRegistry.getIcon(result.entity.type, 12, IconStyleType.ACCENT) - )} - -
- - - {entityRegistry.getDisplayName(result.entity.type, result.entity)} - -
-
- ); - })} -
- )} + {loading && renderLoadingIndicator()} + {!loading && isSearchBarFocused && !!searchResults?.length && renderSearchResults()}
); diff --git a/datahub-web-react/src/app/domain/DomainSearchResultItem.tsx b/datahub-web-react/src/app/domain/DomainSearchResultItem.tsx new file mode 100644 index 00000000000000..dc33ea173e0aeb --- /dev/null +++ b/datahub-web-react/src/app/domain/DomainSearchResultItem.tsx @@ -0,0 +1,68 @@ +// Create a new component called SearchResultItem.js +import React from 'react'; +import { Link } from 'react-router-dom'; +import Highlight from 'react-highlighter'; +import styled from 'styled-components/macro'; +import { Entity, EntityType } from '../../types.generated'; +import { IconStyleType } from '../entity/Entity'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import DomainIcon from './DomainIcon'; +import ParentEntities from '../search/filters/ParentEntities'; +import { getParentDomains } from './utils'; +import EntityRegistry from '../entity/EntityRegistry'; + +type Props = { + entity: Entity; + entityRegistry: EntityRegistry; + query: string; + onResultClick: () => void; +}; + +const SearchResult = styled(Link)` + color: #262626; + display: flex; + align-items: center; + gap: 8px; + height: 100%; + padding: 6px 8px; + width: 100%; + &:hover { + background-color: ${ANTD_GRAY[3]}; + color: #262626; + } +`; + +const IconWrapper = styled.span``; + +const highlightMatchStyle = { + fontWeight: 'bold', + background: 'none', + padding: 0, +}; + +function DomainSearchResultItem({ entity, entityRegistry, query, onResultClick }: Props) { + return ( + + + {entity.type === EntityType.Domain ? ( + + ) : ( + entityRegistry.getIcon(entity.type, 12, IconStyleType.ACCENT) + )} + +
+ + + {entityRegistry.getDisplayName(entity.type, entity)} + +
+
+ ); +} + +export default DomainSearchResultItem; diff --git a/datahub-web-react/src/app/domain/EmptyDomainDescription.tsx b/datahub-web-react/src/app/domain/EmptyDomainDescription.tsx new file mode 100644 index 00000000000000..6a5f304e565be2 --- /dev/null +++ b/datahub-web-react/src/app/domain/EmptyDomainDescription.tsx @@ -0,0 +1,39 @@ +import { Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; + +const StyledParagraph = styled(Typography.Paragraph)` + text-align: justify; + text-justify: inter-word; + margin: 40px 0; + font-size: 15px; +`; + +function EmptyDomainDescription() { + return ( + <> + + Welcome to your Data Domains! It looks like this space + is ready to be transformed into a well-organized data universe. Start by creating your first domain - a + high-level category for your data assets. + + + Create Nested Domains: Want to dive deeper? You can + also create nested domains to add granularity and structure. Just like nesting Russian dolls, its all + about refining your organization. + + + Build Data Products: Once your domains are set, go a + step further! Organize your data assets into data products to realize a data mesh architecture. Data + products empower you to treat data as a product, making it more accessible and manageable. + + + Ready to embark on this data adventure? Click the Create Domain button to begin shaping your data + landscape! 
+ + + ); +} + +export default EmptyDomainDescription; diff --git a/datahub-web-react/src/app/domain/EmptyDomainsSection.tsx b/datahub-web-react/src/app/domain/EmptyDomainsSection.tsx new file mode 100644 index 00000000000000..f232d259c20dad --- /dev/null +++ b/datahub-web-react/src/app/domain/EmptyDomainsSection.tsx @@ -0,0 +1,69 @@ +import { PlusOutlined } from '@ant-design/icons'; +import { Button, Empty, Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; + +const EmptyDomainContainer = styled.div` + display: flex; + justify-content: center; + align-items: center; +`; + +const StyledEmpty = styled(Empty)` + width: 35vw; + @media screen and (max-width: 1300px) { + width: 50vw; + } + @media screen and (max-width: 896px) { + overflow-y: auto; + max-height: 75vh; + &::-webkit-scrollbar { + width: 5px; + background: #d6d6d6; + } + } + padding: 60px 40px; + .ant-empty-image { + display: none; + } +`; + +const StyledButton = styled(Button)` + margin: 18px 8px 0 0; +`; + +const IconContainer = styled.span` + color: ${ANTD_GRAY[7]}; + font-size: 40px; +`; + +interface Props { + title?: string; + setIsCreatingDomain: React.Dispatch>; + description?: React.ReactNode; + icon?: React.ReactNode; +} + +function EmptyDomainsSection(props: Props) { + const { title, description, setIsCreatingDomain, icon } = props; + return ( + + + {icon} + {title} + {description} + + } + > + setIsCreatingDomain(true)}> + Create Domain + + + + ); +} + +export default EmptyDomainsSection; diff --git a/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx b/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx index b69f0c5458b5de..f5fc0cba2d8ec2 100644 --- a/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx +++ b/datahub-web-react/src/app/domain/nestedDomains/ManageDomainsPageV2.tsx @@ -51,7 +51,7 @@ export default function 
ManageDomainsPageV2() { New Domain - + {isCreatingDomain && ( setIsCreatingDomain(false)} diff --git a/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx b/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx index 757119919e3367..75c38cd4951ef2 100644 --- a/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx +++ b/datahub-web-react/src/app/domain/nestedDomains/RootDomains.tsx @@ -1,17 +1,23 @@ import React from 'react'; import styled from 'styled-components'; +import { ReadOutlined } from '@ant-design/icons'; import { Message } from '../../shared/Message'; import { ResultWrapper } from '../../search/SearchResultList'; import { useEntityRegistry } from '../../useEntityRegistry'; import { EntityType } from '../../../types.generated'; import useListDomains from '../useListDomains'; +import EmptyDomainsSection from '../EmptyDomainsSection'; +import EmptyDomainDescription from '../EmptyDomainDescription'; const DomainsWrapper = styled.div` overflow: auto; padding: 0 28px 16px 28px; `; -export default function RootDomains() { +interface Props { + setIsCreatingDomain: React.Dispatch>; +} +export default function RootDomains({ setIsCreatingDomain }: Props) { const entityRegistry = useEntityRegistry(); const { loading, error, data, sortedDomains } = useListDomains({}); @@ -19,6 +25,14 @@ export default function RootDomains() { <> {!data && loading && } {error && } + {!loading && (!data || !data?.listDomains?.domains?.length) && ( + } + title="Organize your data" + description={} + setIsCreatingDomain={setIsCreatingDomain} + /> + )} {sortedDomains?.map((domain) => ( diff --git a/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx b/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx index 0fbcffb9a260c7..8decc2840a379c 100644 --- a/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx +++ 
b/datahub-web-react/src/app/domain/nestedDomains/domainNavigator/DomainNavigator.tsx @@ -1,9 +1,10 @@ -import { Alert } from 'antd'; +import { Alert, Empty } from 'antd'; import React from 'react'; import styled from 'styled-components'; import useListDomains from '../../useListDomains'; import DomainNode from './DomainNode'; import { Domain } from '../../../../types.generated'; +import { ANTD_GRAY } from '../../../entity/shared/constants'; const NavigatorWrapper = styled.div` font-size: 14px; @@ -19,19 +20,28 @@ interface Props { export default function DomainNavigator({ domainUrnToHide, selectDomainOverride }: Props) { const { sortedDomains, error } = useListDomains({}); + const noDomainsFound: boolean = !sortedDomains || sortedDomains.length === 0; return ( {error && } - {sortedDomains?.map((domain) => ( - - ))} + )} + {!noDomainsFound && + sortedDomains?.map((domain) => ( + + ))} ); } diff --git a/datahub-web-react/src/app/embed/EmbeddedPage.tsx b/datahub-web-react/src/app/embed/EmbeddedPage.tsx index 429f83f34af6e8..29d62bc20181b9 100644 --- a/datahub-web-react/src/app/embed/EmbeddedPage.tsx +++ b/datahub-web-react/src/app/embed/EmbeddedPage.tsx @@ -8,9 +8,9 @@ import { VIEW_ENTITY_PAGE } from '../entity/shared/constants'; import { decodeUrn } from '../entity/shared/utils'; import CompactContext from '../shared/CompactContext'; import { useEntityRegistry } from '../useEntityRegistry'; -import { useGetAuthenticatedUserUrn } from '../useGetAuthenticatedUser'; import analytics from '../analytics/analytics'; import { EventType } from '../analytics'; +import { useUserContext } from '../context/useUserContext'; const EmbeddedPageWrapper = styled.div` max-height: 100%; @@ -39,15 +39,16 @@ export default function EmbeddedPage({ entityType }: Props) { }); }, [entityType, urn]); - const authenticatedUserUrn = useGetAuthenticatedUserUrn(); + const { urn: authenticatedUserUrn } = useUserContext(); const { data } = useGetGrantedPrivilegesQuery({ variables: { input: { - 
actorUrn: authenticatedUserUrn, + actorUrn: authenticatedUserUrn as string, resourceSpec: { resourceType: entityType, resourceUrn: urn }, }, }, fetchPolicy: 'cache-first', + skip: !authenticatedUserUrn, }); const privileges = data?.getGrantedPrivileges?.privileges || []; diff --git a/datahub-web-react/src/app/embed/lookup/constants.ts b/datahub-web-react/src/app/embed/lookup/constants.ts index 7faba45dba0ed4..2499397e222cc6 100644 --- a/datahub-web-react/src/app/embed/lookup/constants.ts +++ b/datahub-web-react/src/app/embed/lookup/constants.ts @@ -4,4 +4,4 @@ export const EMBED_LOOKUP_NOT_FOUND_REASON = { } as const; export type EmbedLookupNotFoundReason = - typeof EMBED_LOOKUP_NOT_FOUND_REASON[keyof typeof EMBED_LOOKUP_NOT_FOUND_REASON]; + (typeof EMBED_LOOKUP_NOT_FOUND_REASON)[keyof typeof EMBED_LOOKUP_NOT_FOUND_REASON]; diff --git a/datahub-web-react/src/app/entity/Access/RoleEntity.tsx b/datahub-web-react/src/app/entity/Access/RoleEntity.tsx index e63db9d0bbb2a4..ab609b04f104ac 100644 --- a/datahub-web-react/src/app/entity/Access/RoleEntity.tsx +++ b/datahub-web-react/src/app/entity/Access/RoleEntity.tsx @@ -7,6 +7,7 @@ import { Entity, EntityCapabilityType, IconStyleType, PreviewType } from '../Ent import { getDataForEntityType } from '../shared/containers/profile/utils'; import { urlEncodeUrn } from '../shared/utils'; import RoleEntityProfile from './RoleEntityProfile'; +import { useGetExternalRoleQuery } from '../../../graphql/accessrole.generated'; const PreviewTagIcon = styled(TagOutlined)` font-size: 20px; @@ -50,6 +51,8 @@ export class RoleEntity implements Entity { getEntityName: () => string = () => 'Role'; + useEntityQuery = useGetExternalRoleQuery; + renderProfile: (urn: string) => JSX.Element = (_) => ; renderPreview = (_: PreviewType, data: Role) => ( diff --git a/datahub-web-react/src/app/entity/Entity.tsx b/datahub-web-react/src/app/entity/Entity.tsx index 5920919a9cdab2..490f23330c5945 100644 --- a/datahub-web-react/src/app/entity/Entity.tsx 
+++ b/datahub-web-react/src/app/entity/Entity.tsx @@ -1,6 +1,7 @@ -import { EntityType, SearchResult } from '../../types.generated'; +import { QueryHookOptions, QueryResult } from '@apollo/client'; +import { EntityType, Exact, SearchResult } from '../../types.generated'; import { FetchedEntity } from '../lineage/types'; -import { GenericEntityProperties } from './shared/types'; +import { EntitySidebarSection, GenericEntityProperties } from './shared/types'; export enum PreviewType { /** @@ -80,6 +81,10 @@ export enum EntityCapabilityType { * Assigning the entity to a data product */ DATA_PRODUCTS, + /** + * Assigning Business Attribute to a entity + */ + BUSINESS_ATTRIBUTES, } /** @@ -176,4 +181,31 @@ export interface Entity { * Returns the profile component to be displayed in our Chrome extension */ renderEmbeddedProfile?: (urn: string) => JSX.Element; + + /** + * Returns the entity profile sidebar sections for an entity type. Only implemented on Datasets for now. + */ + getSidebarSections?: () => EntitySidebarSection[]; + + /** + * Get the query necessary for refetching data on an entity profile page + */ + useEntityQuery?: ( + baseOptions: QueryHookOptions< + any, + Exact<{ + urn: string; + }> + >, + ) => QueryResult< + any, + Exact<{ + urn: string; + }> + >; + + /** + * Returns the url to be navigated to when clicked on Cards + */ + getCustomCardUrlPath?: () => string | undefined; } diff --git a/datahub-web-react/src/app/entity/EntityPage.tsx b/datahub-web-react/src/app/entity/EntityPage.tsx index 09233dbd89f694..916fa417954126 100644 --- a/datahub-web-react/src/app/entity/EntityPage.tsx +++ b/datahub-web-react/src/app/entity/EntityPage.tsx @@ -8,7 +8,6 @@ import { useEntityRegistry } from '../useEntityRegistry'; import analytics, { EventType } from '../analytics'; import { decodeUrn } from './shared/utils'; import { useGetGrantedPrivilegesQuery } from '../../graphql/policy.generated'; -import { Message } from '../shared/Message'; import { UnauthorizedPage } 
from '../authorization/UnauthorizedPage'; import { ErrorSection } from '../shared/error/ErrorSection'; import { VIEW_ENTITY_PAGE } from './shared/constants'; @@ -34,7 +33,7 @@ export const EntityPage = ({ entityType }: Props) => { const isLineageSupported = entity.isLineageEnabled(); const isLineageMode = useIsLineageMode(); const authenticatedUserUrn = useUserContext()?.user?.urn; - const { loading, error, data } = useGetGrantedPrivilegesQuery({ + const { error, data } = useGetGrantedPrivilegesQuery({ variables: { input: { actorUrn: authenticatedUserUrn as string, @@ -71,7 +70,6 @@ export const EntityPage = ({ entityType }: Props) => { return ( <> - {loading && } {error && } {data && !canViewEntityPage && } {canViewEntityPage && diff --git a/datahub-web-react/src/app/entity/EntityRegistry.tsx b/datahub-web-react/src/app/entity/EntityRegistry.tsx index 6642c2c7b0467c..00e7385ff5784b 100644 --- a/datahub-web-react/src/app/entity/EntityRegistry.tsx +++ b/datahub-web-react/src/app/entity/EntityRegistry.tsx @@ -1,10 +1,11 @@ +import { QueryHookOptions, QueryResult } from '@apollo/client'; import React from 'react'; -import { Entity as EntityInterface, EntityType, SearchResult } from '../../types.generated'; +import { Entity as EntityInterface, EntityType, Exact, SearchResult } from '../../types.generated'; import { FetchedEntity } from '../lineage/types'; import { SearchResultProvider } from '../search/context/SearchResultContext'; import { Entity, EntityCapabilityType, IconStyleType, PreviewType } from './Entity'; import { GLOSSARY_ENTITY_TYPES } from './shared/constants'; -import { GenericEntityProperties } from './shared/types'; +import { EntitySidebarSection, GenericEntityProperties } from './shared/types'; import { dictToQueryStringParams, getFineGrainedLineageWithSiblings, urlEncodeUrn } from './shared/utils'; function validatedGet(key: K, map: Map): V { @@ -115,6 +116,25 @@ export default class EntityRegistry { } } + getEntityQuery(type: EntityType): + | (( + 
baseOptions: QueryHookOptions< + any, + Exact<{ + urn: string; + }> + >, + ) => QueryResult< + any, + Exact<{ + urn: string; + }> + >) + | undefined { + const entity = validatedGet(type, this.entityTypeToEntity); + return entity.useEntityQuery; + } + renderProfile(type: EntityType, urn: string): JSX.Element { const entity = validatedGet(type, this.entityTypeToEntity); return entity.renderProfile(urn); @@ -194,6 +214,11 @@ export default class EntityRegistry { return entity.displayName(data); } + getSidebarSections(type: EntityType): EntitySidebarSection[] { + const entity = validatedGet(type, this.entityTypeToEntity); + return entity.getSidebarSections ? entity.getSidebarSections() : []; + } + getGenericEntityProperties(type: EntityType, data: T): GenericEntityProperties | null { const entity = validatedGet(type, this.entityTypeToEntity); return entity.getGenericEntityProperties(data); @@ -211,4 +236,9 @@ export default class EntityRegistry { .map((entity) => entity.type), ); } + + getCustomCardUrlPath(type: EntityType): string | undefined { + const entity = validatedGet(type, this.entityTypeToEntity); + return entity.getCustomCardUrlPath?.(); + } } diff --git a/datahub-web-react/src/app/entity/businessAttribute/BusinessAttributeEntity.tsx b/datahub-web-react/src/app/entity/businessAttribute/BusinessAttributeEntity.tsx new file mode 100644 index 00000000000000..b827a3c37d6a5c --- /dev/null +++ b/datahub-web-react/src/app/entity/businessAttribute/BusinessAttributeEntity.tsx @@ -0,0 +1,156 @@ +import * as React from 'react'; +import { GlobalOutlined } from '@ant-design/icons'; +import { BusinessAttribute, EntityType, SearchResult } from '../../../types.generated'; +import { Entity, EntityCapabilityType, IconStyleType, PreviewType } from '../Entity'; +import { getDataForEntityType } from '../shared/containers/profile/utils'; +import { EntityProfile } from '../shared/containers/profile/EntityProfile'; +import { useGetBusinessAttributeQuery } from 
'../../../graphql/businessAttribute.generated'; +import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; +import { DocumentationTab } from '../shared/tabs/Documentation/DocumentationTab'; +import { PropertiesTab } from '../shared/tabs/Properties/PropertiesTab'; +import { SidebarAboutSection } from '../shared/containers/profile/sidebar/AboutSection/SidebarAboutSection'; +import { SidebarOwnerSection } from '../shared/containers/profile/sidebar/Ownership/sidebar/SidebarOwnerSection'; +import { SidebarTagsSection } from '../shared/containers/profile/sidebar/SidebarTagsSection'; +import { Preview } from './preview/Preview'; +import { PageRoutes } from '../../../conf/Global'; +import BusinessAttributeRelatedEntity from './profile/BusinessAttributeRelatedEntity'; +import { BusinessAttributeDataTypeSection } from './profile/BusinessAttributeDataTypeSection'; + +/** + * Definition of datahub Business Attribute Entity + */ +/* eslint-disable @typescript-eslint/no-unused-vars */ +export class BusinessAttributeEntity implements Entity { + type: EntityType = EntityType.BusinessAttribute; + + icon = (fontSize: number, styleType: IconStyleType, color?: string) => { + if (styleType === IconStyleType.TAB_VIEW) { + return ; + } + + if (styleType === IconStyleType.HIGHLIGHT) { + return ; + } + + if (styleType === IconStyleType.SVG) { + // TODO: Update the returned path value to the correct svg icon path + return ( + + ); + } + + return ( + + ); + }; + + displayName = (data: BusinessAttribute) => { + return data?.properties?.name || data?.urn; + }; + + getPathName = () => 'business-attribute'; + + getEntityName = () => 'Business Attribute'; + + getCollectionName = () => 'Business Attributes'; + + getCustomCardUrlPath = () => PageRoutes.BUSINESS_ATTRIBUTE; + + isBrowseEnabled = () => false; + + isLineageEnabled = () => false; + + isSearchEnabled = () => true; + + getOverridePropertiesFromEntity = (data: BusinessAttribute) => { + return { + name: 
data.properties?.name, + }; + }; + + getGenericEntityProperties = (data: BusinessAttribute) => { + return getDataForEntityType({ + data, + entityType: this.type, + getOverrideProperties: this.getOverridePropertiesFromEntity, + }); + }; + + renderPreview = (previewType: PreviewType, data: BusinessAttribute) => { + return ( + + ); + }; + + renderProfile = (urn: string) => { + return ( + + ); + }; + + renderSearch = (result: SearchResult) => { + return this.renderPreview(PreviewType.SEARCH, result.entity as BusinessAttribute); + }; + + supportedCapabilities = () => { + return new Set([ + EntityCapabilityType.OWNERS, + EntityCapabilityType.TAGS, + EntityCapabilityType.GLOSSARY_TERMS, + EntityCapabilityType.BUSINESS_ATTRIBUTES, + ]); + }; +} diff --git a/datahub-web-react/src/app/entity/businessAttribute/preview/Preview.tsx b/datahub-web-react/src/app/entity/businessAttribute/preview/Preview.tsx new file mode 100644 index 00000000000000..323c287a0acd78 --- /dev/null +++ b/datahub-web-react/src/app/entity/businessAttribute/preview/Preview.tsx @@ -0,0 +1,40 @@ +import React from 'react'; +import { GlobalOutlined } from '@ant-design/icons'; +import { EntityType, Owner } from '../../../../types.generated'; +import DefaultPreviewCard from '../../../preview/DefaultPreviewCard'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { IconStyleType, PreviewType } from '../../Entity'; +import UrlButton from '../../shared/UrlButton'; +import { getRelatedEntitiesUrl } from '../../../businessAttribute/businessAttributeUtils'; + +export const Preview = ({ + urn, + name, + description, + owners, + previewType, +}: { + urn: string; + name: string; + description?: string | null; + owners?: Array | null; + previewType: PreviewType; +}): JSX.Element => { + const entityRegistry = useEntityRegistry(); + return ( + } + type="Business Attribute" + typeIcon={entityRegistry.getIcon(EntityType.BusinessAttribute, 14, IconStyleType.ACCENT)} + entityTitleSuffix={ + View Related 
Entities + } + /> + ); +}; diff --git a/datahub-web-react/src/app/entity/businessAttribute/preview/_tests_/Preview.test.tsx b/datahub-web-react/src/app/entity/businessAttribute/preview/_tests_/Preview.test.tsx new file mode 100644 index 00000000000000..bca32de985d377 --- /dev/null +++ b/datahub-web-react/src/app/entity/businessAttribute/preview/_tests_/Preview.test.tsx @@ -0,0 +1,26 @@ +import { MockedProvider } from '@apollo/client/testing'; +import { render } from '@testing-library/react'; +import React from 'react'; +import { mocks } from '../../../../../Mocks'; +import TestPageContainer from '../../../../../utils/test-utils/TestPageContainer'; +import { Preview } from '../Preview'; +import { PreviewType } from '../../../Entity'; + +describe('Preview', () => { + it('renders', () => { + const { getByText } = render( + + + + + , + ); + expect(getByText('definition')).toBeInTheDocument(); + }); +}); diff --git a/datahub-web-react/src/app/entity/businessAttribute/profile/BusinessAttributeDataTypeSection.tsx b/datahub-web-react/src/app/entity/businessAttribute/profile/BusinessAttributeDataTypeSection.tsx new file mode 100644 index 00000000000000..da2b108c2d8d04 --- /dev/null +++ b/datahub-web-react/src/app/entity/businessAttribute/profile/BusinessAttributeDataTypeSection.tsx @@ -0,0 +1,98 @@ +import { Button, message, Select } from 'antd'; +import { EditOutlined } from '@ant-design/icons'; +import React, { useEffect, useState } from 'react'; +import styled from 'styled-components'; +import { useEntityData, useRefetch } from '../../shared/EntityContext'; +import { SidebarHeader } from '../../shared/containers/profile/sidebar/SidebarHeader'; +import { useUpdateBusinessAttributeMutation } from '../../../../graphql/businessAttribute.generated'; +import { SchemaFieldDataType } from '../../../businessAttribute/businessAttributeUtils'; + +interface Props { + readOnly?: boolean; +} + +const DataTypeSelect = styled(Select)` + && { + width: 100%; + margin-top: 1em; + 
margin-bottom: 1em; + } +`; +// Ensures that any newly added datatype is automatically included in the user dropdown. +const DATA_TYPES = Object.values(SchemaFieldDataType); +export const BusinessAttributeDataTypeSection = ({ readOnly }: Props) => { + const { urn, entityData } = useEntityData(); + const [originalDescription, setOriginalDescription] = useState(null); + const [isEditing, setEditing] = useState(false); + const refetch = useRefetch(); + + useEffect(() => { + if (entityData?.properties?.businessAttributeDataType) { + setOriginalDescription(entityData?.properties?.businessAttributeDataType); + } + }, [entityData]); + + const [updateBusinessAttribute] = useUpdateBusinessAttributeMutation(); + + const handleChange = (value) => { + if (value === originalDescription) { + setEditing(false); + return; + } + + updateBusinessAttribute({ variables: { urn, input: { type: value } } }) + .then(() => { + setEditing(false); + setOriginalDescription(value); + message.success({ content: 'Data Type Updated', duration: 2 }); + refetch(); + }) + .catch((e: unknown) => { + message.destroy(); + if (e instanceof Error) { + message.error({ content: `Failed to update Data Type: \n ${e.message || ''}`, duration: 3 }); + } + }); + }; + + // Toggle editing mode + const handleEditClick = () => { + setEditing(!isEditing); + }; + + return ( +
+ + + + ) + } + /> + {originalDescription} + {isEditing && ( + + {DATA_TYPES.map((dataType: SchemaFieldDataType) => ( + + {dataType} + + ))} + + )} +
+ ); +}; + +export default BusinessAttributeDataTypeSection; diff --git a/datahub-web-react/src/app/entity/businessAttribute/profile/BusinessAttributeRelatedEntity.tsx b/datahub-web-react/src/app/entity/businessAttribute/profile/BusinessAttributeRelatedEntity.tsx new file mode 100644 index 00000000000000..46d9d4ea51d245 --- /dev/null +++ b/datahub-web-react/src/app/entity/businessAttribute/profile/BusinessAttributeRelatedEntity.tsx @@ -0,0 +1,44 @@ +import * as React from 'react'; +import { UnionType } from '../../../search/utils/constants'; +import { EmbeddedListSearchSection } from '../../shared/components/styled/search/EmbeddedListSearchSection'; + +import { useEntityData } from '../../shared/EntityContext'; + +export default function BusinessAttributeRelatedEntity() { + const { entityData } = useEntityData(); + + const entityUrn = entityData?.urn; + + const fixedOrFilters = + (entityUrn && [ + { + field: 'businessAttribute', + values: [entityUrn], + }, + ]) || + []; + + entityData?.isAChildren?.relationships.forEach((businessAttribute) => { + const childUrn = businessAttribute.entity?.urn; + + if (childUrn) { + fixedOrFilters.push({ + field: 'businessAttributes', + values: [childUrn], + }); + } + }); + + return ( + + ); +} diff --git a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx index 0f1b6dbf3d660d..913d502972fe14 100644 --- a/datahub-web-react/src/app/entity/chart/ChartEntity.tsx +++ b/datahub-web-react/src/app/entity/chart/ChartEntity.tsx @@ -27,6 +27,8 @@ import EmbeddedProfile from '../shared/embed/EmbeddedProfile'; import { LOOKER_URN } from '../../ingest/source/builder/constants'; import { MatchedFieldList } from '../../search/matches/MatchedFieldList'; import { matchedInputFieldRenderer } from '../../search/matches/matchedInputFieldRenderer'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; +import { ChartQueryTab } from './ChartQueryTab'; /** * Definition of the 
DataHub Chart entity. @@ -73,18 +75,50 @@ export class ChartEntity implements Entity { getCollectionName = () => 'Charts'; + useEntityQuery = useGetChartQuery; + + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderProfile = (urn: string) => ( (chart?.chart?.query?.rawQuery && true) || false, + enabled: (_, chart: GetChartQuery) => (chart?.chart?.query?.rawQuery && true) || false, + }, + }, { name: 'Documentation', component: DocumentationTab, @@ -126,38 +160,28 @@ export class ChartEntity implements Entity { name: 'Properties', component: PropertiesTab, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, chart) => { + const activeIncidentCount = chart?.chart?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; }, }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, ]} + sidebarSections={this.getSidebarSections()} /> ); getOverridePropertiesFromEntity = (chart?: Chart | null): GenericEntityProperties => { // TODO: Get rid of this once we have correctly formed platform coming back. const name = chart?.properties?.name; + const subTypes = chart?.subTypes; const externalUrl = chart?.properties?.externalUrl; return { name, externalUrl, + entityTypeOverride: subTypes ? 
capitalizeFirstLetterOnly(subTypes.typeNames?.[0]) : '', }; }; @@ -166,6 +190,7 @@ export class ChartEntity implements Entity { return ( { domain={data.domain?.domain} dataProduct={getDataProduct(genericProperties?.dataProduct)} parentContainers={data.parentContainers} + health={data.health} /> ); }; @@ -187,6 +213,7 @@ export class ChartEntity implements Entity { return ( { } degree={(result as any).degree} paths={(result as any).paths} + health={data.health} /> ); }; @@ -222,6 +250,8 @@ export class ChartEntity implements Entity { type: EntityType.Chart, icon: entity?.platform?.properties?.logoUrl || undefined, platform: entity?.platform, + subtype: entity?.subTypes?.typeNames?.[0] || undefined, + health: entity?.health || undefined, }; }; @@ -253,7 +283,7 @@ export class ChartEntity implements Entity { ); diff --git a/datahub-web-react/src/app/entity/chart/ChartQueryTab.tsx b/datahub-web-react/src/app/entity/chart/ChartQueryTab.tsx new file mode 100644 index 00000000000000..7c28f4be88d8d5 --- /dev/null +++ b/datahub-web-react/src/app/entity/chart/ChartQueryTab.tsx @@ -0,0 +1,61 @@ +import { Typography } from 'antd'; +import React from 'react'; +import styled from 'styled-components'; +import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'; +import { GetChartQuery } from '../../../graphql/chart.generated'; +import { ANTD_GRAY } from '../shared/constants'; +import { useBaseEntity } from '../shared/EntityContext'; +import { InfoItem } from '../shared/components/styled/InfoItem'; + +const InfoSection = styled.div` + border-bottom: 1px solid ${ANTD_GRAY[4.5]}; + padding: 16px 20px; +`; + +const InfoItemContainer = styled.div<{ justifyContent }>` + display: flex; + position: relative; + justify-content: ${(props) => props.justifyContent}; + padding: 12px 2px; +`; + +const InfoItemContent = styled.div` + padding-top: 8px; +`; + +const QueryText = styled(Typography.Paragraph)` + margin-top: 20px; + background-color: ${ANTD_GRAY[2]}; +`; + +// NOTE: Yes, 
using `!important` is a shame. However, the SyntaxHighlighter is applying styles directly +// to the component, so there's no way around this +const NestedSyntax = styled(SyntaxHighlighter)` + background-color: transparent !important; + border: none !important; +`; + +export function ChartQueryTab() { + const baseEntity = useBaseEntity(); + const query = baseEntity?.chart?.query?.rawQuery || 'UNKNOWN'; + const type = baseEntity?.chart?.query?.type || 'UNKNOWN'; + + return ( + <> + + Details + + + {type.toUpperCase()} + + + + + Query + + {query} + + + + ); +} diff --git a/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx b/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx index 7d0fc143043e29..adb75aa7045271 100644 --- a/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx +++ b/datahub-web-react/src/app/entity/chart/preview/ChartPreview.tsx @@ -13,8 +13,10 @@ import { ChartStatsSummary, DataProduct, EntityPath, + Health, } from '../../../../types.generated'; import DefaultPreviewCard from '../../../preview/DefaultPreviewCard'; +import { capitalizeFirstLetterOnly } from '../../../shared/textUtil'; import { useEntityRegistry } from '../../../useEntityRegistry'; import { IconStyleType } from '../../Entity'; import { ChartStatsSummary as ChartStatsSummaryView } from '../shared/ChartStatsSummary'; @@ -43,6 +45,8 @@ export const ChartPreview = ({ snippet, degree, paths, + subType, + health, }: { urn: string; platform?: string; @@ -67,6 +71,8 @@ export const ChartPreview = ({ snippet?: React.ReactNode | null; degree?: number; paths?: EntityPath[]; + subType?: string | null; + health?: Health[] | null; }): JSX.Element => { const entityRegistry = useEntityRegistry(); @@ -76,7 +82,7 @@ export const ChartPreview = ({ name={name || ''} urn={urn} description={description || ''} - type="Chart" + type={capitalizeFirstLetterOnly(subType) || 'Chart'} typeIcon={entityRegistry.getIcon(EntityType.Chart, 14, IconStyleType.ACCENT)} logoUrl={logoUrl 
|| ''} platform={platform} @@ -103,6 +109,7 @@ export const ChartPreview = ({ } degree={degree} paths={paths} + health={health || undefined} /> ); }; diff --git a/datahub-web-react/src/app/entity/container/ContainerEntity.tsx b/datahub-web-react/src/app/entity/container/ContainerEntity.tsx index 9aecf6900f6341..9cd32cf33a013f 100644 --- a/datahub-web-react/src/app/entity/container/ContainerEntity.tsx +++ b/datahub-web-react/src/app/entity/container/ContainerEntity.tsx @@ -63,11 +63,13 @@ export class ContainerEntity implements Entity { getCollectionName = () => 'Containers'; + useEntityQuery = useGetContainerQuery; + renderProfile = (urn: string) => ( { component: PropertiesTab, }, ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - }, - { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, - // TODO: Add back once entity-level recommendations are complete. - // { - // component: SidebarRecommendationsSection, - // }, - ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + // TODO: Add back once entity-level recommendations are complete. 
+ // { + // component: SidebarRecommendationsSection, + // }, + ]; + renderPreview = (_: PreviewType, data: Container) => { const genericProperties = this.getGenericEntityProperties(data); return ( @@ -167,6 +171,7 @@ export class ContainerEntity implements Entity { getOverridePropertiesFromEntity = (data: Container) => { return { name: this.displayName(data), + externalUrl: data.properties?.externalUrl, entityCount: data.entities?.total, }; }; @@ -194,7 +199,7 @@ export class ContainerEntity implements Entity { ); diff --git a/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx b/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx index 0a36d0e5f1bfad..9564cbc18198e4 100644 --- a/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx +++ b/datahub-web-react/src/app/entity/dashboard/DashboardEntity.tsx @@ -31,6 +31,7 @@ import { getDataProduct } from '../shared/utils'; import { LOOKER_URN } from '../../ingest/source/builder/constants'; import { MatchedFieldList } from '../../search/matches/MatchedFieldList'; import { matchedInputFieldRenderer } from '../../search/matches/matchedInputFieldRenderer'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; /** * Definition of the DataHub Dashboard entity. 
@@ -77,14 +78,41 @@ export class DashboardEntity implements Entity { getCollectionName = () => 'Dashboards'; + useEntityQuery = useGetDashboardQuery; + + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderProfile = (urn: string) => ( { name: 'Properties', component: PropertiesTab, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dashboard) => { + const activeIncidentCount = dashboard?.dashboard?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; }, }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, ]} + sidebarSections={this.getSidebarSections()} /> ); @@ -197,6 +210,7 @@ export class DashboardEntity implements Entity { lastUpdatedMs={data.properties?.lastModified?.time} createdMs={data.properties?.created?.time} subtype={data.subTypes?.typeNames?.[0]} + health={data.health} /> ); }; @@ -236,6 +250,7 @@ export class DashboardEntity implements Entity { subtype={data.subTypes?.typeNames?.[0]} degree={(result as any).degree} paths={(result as any).paths} + health={data.health} /> ); }; @@ -248,6 +263,7 @@ export class DashboardEntity implements Entity { subtype: entity?.subTypes?.typeNames?.[0] || undefined, icon: entity?.platform?.properties?.logoUrl || undefined, platform: entity?.platform, + health: entity?.health || undefined, }; }; @@ -279,7 +295,7 @@ export 
class DashboardEntity implements Entity { ); diff --git a/datahub-web-react/src/app/entity/dashboard/preview/DashboardPreview.tsx b/datahub-web-react/src/app/entity/dashboard/preview/DashboardPreview.tsx index d822fd1f613b39..78e87b8f141cc4 100644 --- a/datahub-web-react/src/app/entity/dashboard/preview/DashboardPreview.tsx +++ b/datahub-web-react/src/app/entity/dashboard/preview/DashboardPreview.tsx @@ -13,6 +13,7 @@ import { DashboardStatsSummary, DataProduct, EntityPath, + Health, } from '../../../../types.generated'; import DefaultPreviewCard from '../../../preview/DefaultPreviewCard'; import { useEntityRegistry } from '../../../useEntityRegistry'; @@ -46,6 +47,7 @@ export const DashboardPreview = ({ snippet, degree, paths, + health, }: { urn: string; platform?: string; @@ -72,6 +74,7 @@ export const DashboardPreview = ({ snippet?: React.ReactNode | null; degree?: number; paths?: EntityPath[]; + health?: Health[] | null; }): JSX.Element => { const entityRegistry = useEntityRegistry(); @@ -110,6 +113,7 @@ export const DashboardPreview = ({ } degree={degree} paths={paths} + health={health || undefined} /> ); }; diff --git a/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx b/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx index e8fb4c16aca9c6..a359d658d27f78 100644 --- a/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx +++ b/datahub-web-react/src/app/entity/dashboard/shared/DashboardStatsSummary.tsx @@ -11,6 +11,8 @@ import ExpandingStat from '../../dataset/shared/ExpandingStat'; const StatText = styled.span` color: ${ANTD_GRAY[8]}; + @media (min-width: 1024px) { + white-space: nowrap; `; const HelpIcon = styled(QuestionCircleOutlined)` diff --git a/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx b/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx index 3bf24ac276c8ed..fb5ea280087412 100644 --- a/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx +++ 
b/datahub-web-react/src/app/entity/dataFlow/DataFlowEntity.tsx @@ -18,6 +18,7 @@ import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; import { capitalizeFirstLetterOnly } from '../../shared/textUtil'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; import { getDataProduct } from '../shared/utils'; +import { IncidentTab } from '../shared/tabs/Incident/IncidentTab'; /** * Definition of the DataHub DataFlow entity. @@ -58,14 +59,16 @@ export class DataFlowEntity implements Entity { getCollectionName = () => 'Pipelines'; + useEntityQuery = useGetDataFlowQuery; + renderProfile = (urn: string) => ( { { name: 'Tasks', component: DataFlowJobsTab, - }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, + urn, }, }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dataFlow) => { + const activeIncidentCount = dataFlow?.dataFlow?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; }, }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + getOverridePropertiesFromEntity = (dataFlow?: DataFlow | null): GenericEntityProperties => { // TODO: Get rid of this once we have correctly formed platform coming back. 
const name = dataFlow?.properties?.name; @@ -133,6 +149,7 @@ export class DataFlowEntity implements Entity { domain={data.domain?.domain} dataProduct={getDataProduct(genericProperties?.dataProduct)} externalUrl={data.properties?.externalUrl} + health={data.health} /> ); }; @@ -160,6 +177,7 @@ export class DataFlowEntity implements Entity { deprecation={data.deprecation} degree={(result as any).degree} paths={(result as any).paths} + health={data.health} /> ); }; diff --git a/datahub-web-react/src/app/entity/dataFlow/preview/Preview.tsx b/datahub-web-react/src/app/entity/dataFlow/preview/Preview.tsx index c313171d2f2419..f210f7c985ebf7 100644 --- a/datahub-web-react/src/app/entity/dataFlow/preview/Preview.tsx +++ b/datahub-web-react/src/app/entity/dataFlow/preview/Preview.tsx @@ -8,6 +8,7 @@ import { EntityPath, EntityType, GlobalTags, + Health, Owner, SearchInsight, } from '../../../../types.generated'; @@ -38,6 +39,7 @@ export const Preview = ({ deprecation, degree, paths, + health, }: { urn: string; name: string; @@ -56,6 +58,7 @@ export const Preview = ({ jobCount?: number | null; degree?: number; paths?: EntityPath[]; + health?: Health[] | null; }): JSX.Element => { const entityRegistry = useEntityRegistry(); return ( @@ -87,6 +90,7 @@ export const Preview = ({ } degree={degree} paths={paths} + health={health || undefined} /> ); }; diff --git a/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx b/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx index 29741119ac52b3..fe1a906371e9d0 100644 --- a/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx +++ b/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx @@ -21,6 +21,7 @@ import { DataFlowEntity } from '../dataFlow/DataFlowEntity'; import { capitalizeFirstLetterOnly } from '../../shared/textUtil'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; import { getDataProduct } from '../shared/utils'; +import { IncidentTab } from 
'../shared/tabs/Incident/IncidentTab'; const getDataJobPlatformName = (data?: DataJob): string => { return ( @@ -69,14 +70,16 @@ export class DataJobEntity implements Entity { getCollectionName = () => 'Tasks'; + useEntityQuery = useGetDataJobQuery; + renderProfile = (urn: string) => ( { enabled: (_, dataJob: GetDataJobQuery) => (dataJob?.dataJob?.runs?.total || 0) !== 0, }, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dataJob) => { + const activeIncidentCount = dataJob?.dataJob?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; }, }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + getOverridePropertiesFromEntity = (dataJob?: DataJob | null): GenericEntityProperties => { // TODO: Get rid of this once we have correctly formed platform coming back. 
const name = dataJob?.properties?.name; @@ -147,6 +160,7 @@ export class DataJobEntity implements Entity { { domain={data.domain?.domain} dataProduct={getDataProduct(genericProperties?.dataProduct)} externalUrl={data.properties?.externalUrl} + health={data.health} /> ); }; @@ -166,6 +181,7 @@ export class DataJobEntity implements Entity { { } degree={(result as any).degree} paths={(result as any).paths} + health={data.health} /> ); }; @@ -212,6 +229,7 @@ export class DataJobEntity implements Entity { type: EntityType.DataJob, icon: entity?.dataFlow?.platform?.properties?.logoUrl || undefined, platform: entity?.dataFlow?.platform, + health: entity?.health || undefined, }; }; diff --git a/datahub-web-react/src/app/entity/dataJob/preview/Preview.tsx b/datahub-web-react/src/app/entity/dataJob/preview/Preview.tsx index 61963ff2dce6b9..b163722b5151c7 100644 --- a/datahub-web-react/src/app/entity/dataJob/preview/Preview.tsx +++ b/datahub-web-react/src/app/entity/dataJob/preview/Preview.tsx @@ -10,6 +10,7 @@ import { EntityPath, EntityType, GlobalTags, + Health, Owner, SearchInsight, } from '../../../../types.generated'; @@ -26,6 +27,7 @@ const StatText = styled(Typography.Text)` export const Preview = ({ urn, name, + subType, description, platformName, platformLogo, @@ -41,9 +43,11 @@ export const Preview = ({ externalUrl, degree, paths, + health, }: { urn: string; name: string; + subType?: string | null; description?: string | null; platformName: string; platformLogo?: string | null; @@ -59,6 +63,7 @@ export const Preview = ({ externalUrl?: string | null; degree?: number; paths?: EntityPath[]; + health?: Health[] | null; }): JSX.Element => { const entityRegistry = useEntityRegistry(); return ( @@ -67,7 +72,7 @@ export const Preview = ({ name={name} urn={urn} description={description || ''} - type="Data Task" + type={subType || 'Data Task'} typeIcon={entityRegistry.getIcon(EntityType.DataJob, 14, IconStyleType.ACCENT)} platform={platformName} logoUrl={platformLogo || ''} 
@@ -92,6 +97,7 @@ export const Preview = ({ } degree={degree} paths={paths} + health={health || undefined} /> ); }; diff --git a/datahub-web-react/src/app/entity/dataJob/tabs/RunsTab.tsx b/datahub-web-react/src/app/entity/dataJob/tabs/RunsTab.tsx index 24478e2eaaa4dc..5cd621719ce8fd 100644 --- a/datahub-web-react/src/app/entity/dataJob/tabs/RunsTab.tsx +++ b/datahub-web-react/src/app/entity/dataJob/tabs/RunsTab.tsx @@ -13,7 +13,7 @@ import { import { CompactEntityNameList } from '../../../recommendations/renderer/component/CompactEntityNameList'; import { ANTD_GRAY } from '../../shared/constants'; import { useEntityData } from '../../shared/EntityContext'; -import { ReactComponent as LoadingSvg } from '../../../../images/datahub-logo-color-loading_pendulum.svg'; +import LoadingSvg from '../../../../images/datahub-logo-color-loading_pendulum.svg?react'; import { scrollToTop } from '../../../shared/searchUtils'; const ExternalUrlLink = styled.a` diff --git a/datahub-web-react/src/app/entity/dataPlatform/DataPlatformEntity.tsx b/datahub-web-react/src/app/entity/dataPlatform/DataPlatformEntity.tsx index 548cbdafda9e4e..6687ec9f914c1b 100644 --- a/datahub-web-react/src/app/entity/dataPlatform/DataPlatformEntity.tsx +++ b/datahub-web-react/src/app/entity/dataPlatform/DataPlatformEntity.tsx @@ -3,6 +3,7 @@ import { DatabaseOutlined } from '@ant-design/icons'; import { DataPlatform, EntityType, SearchResult } from '../../../types.generated'; import { Entity, IconStyleType, PreviewType } from '../Entity'; import { GenericEntityProperties } from '../shared/types'; +import { useGetDataPlatformQuery } from '../../../graphql/dataPlatform.generated'; const getDisplayName = (data?: DataPlatform): string => { return data?.properties?.displayName || data?.name || ''; @@ -43,6 +44,8 @@ export class DataPlatformEntity implements Entity { // Currently unused. getCollectionName = () => 'Data Platforms'; + useEntityQuery = useGetDataPlatformQuery; + // Currently unused. 
renderProfile = (_: string) => <>; diff --git a/datahub-web-react/src/app/entity/dataProduct/DataProductEntity.tsx b/datahub-web-react/src/app/entity/dataProduct/DataProductEntity.tsx index 620d42943a74ab..6b31de84f85bb1 100644 --- a/datahub-web-react/src/app/entity/dataProduct/DataProductEntity.tsx +++ b/datahub-web-react/src/app/entity/dataProduct/DataProductEntity.tsx @@ -63,6 +63,8 @@ export class DataProductEntity implements Entity { getCollectionName = () => 'Data Products'; + useEntityQuery = useGetDataProductQuery; + renderProfile = (urn: string) => ( { component: PropertiesTab, }, ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, - { - component: SidebarViewDefinitionSection, - display: { - // to do - change when we have a GetDataProductQuery - visible: (_, dataset: GetDatasetQuery) => - (dataset?.dataset?.viewProperties?.logic && true) || false, - }, - }, - { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - properties: { - updateOnly: true, - }, - }, - ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarViewDefinitionSection, + display: { + // to do - change when we have a GetDataProductQuery + visible: (_, dataset: GetDatasetQuery) => (dataset?.dataset?.viewProperties?.logic && true) || false, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + properties: { + updateOnly: true, + }, + }, + ]; + renderPreview = (_: PreviewType, data: DataProduct) => { return ( { getCollectionName = () => 'Datasets'; + useEntityQuery = 
useGetDatasetQuery; + renderProfile = (urn: string) => ( { name: 'Schema', component: SchemaTab, }, + { + name: 'Relationships', + component: RelationshipsTab, + display: { + visible: (_, _1) => false, + enabled: (_, _2) => false, + }, + }, { name: 'View Definition', component: ViewDefinitionTab, display: { visible: (_, dataset: GetDatasetQuery) => - dataset?.dataset?.subTypes?.typeNames + !!dataset?.dataset?.viewProperties?.logic || + !!dataset?.dataset?.subTypes?.typeNames ?.map((t) => t.toLocaleLowerCase()) - .includes(SUBTYPES.VIEW.toLocaleLowerCase()) || false, - enabled: (_, dataset: GetDatasetQuery) => - (dataset?.dataset?.viewProperties?.logic && true) || false, + .includes(SUBTYPES.VIEW.toLocaleLowerCase()), + enabled: (_, dataset: GetDatasetQuery) => !!dataset?.dataset?.viewProperties?.logic, }, }, { @@ -153,30 +167,34 @@ export class DatasetEntity implements Entity { }, }, { - name: 'Validation', + name: 'Quality', component: ValidationsTab, display: { visible: (_, _1) => true, enabled: (_, dataset: GetDatasetQuery) => { - return ( - (dataset?.dataset?.assertions?.total || 0) > 0 || dataset?.dataset?.testResults !== null - ); + return (dataset?.dataset?.assertions?.total || 0) > 0; }, }, }, { - name: 'Operations', + name: 'Governance', + component: GovernanceTab, + display: { + visible: (_, _1) => true, + enabled: (_, dataset: GetDatasetQuery) => { + return dataset?.dataset?.testResults !== null; + }, + }, + }, + { + name: 'Runs', // TODO: Rename this to DatasetRunsTab. 
component: OperationsTab, display: { visible: (_, dataset: GetDatasetQuery) => { - return ( - (dataset?.dataset?.readRuns?.total || 0) + (dataset?.dataset?.writeRuns?.total || 0) > 0 - ); + return (dataset?.dataset?.runs?.total || 0) > 0; }, enabled: (_, dataset: GetDatasetQuery) => { - return ( - (dataset?.dataset?.readRuns?.total || 0) + (dataset?.dataset?.writeRuns?.total || 0) > 0 - ); + return (dataset?.dataset?.runs?.total || 0) > 0; }, }, }, @@ -185,55 +203,67 @@ export class DatasetEntity implements Entity { component: AccessManagement, display: { visible: (_, _1) => this.appconfig().config.featureFlags.showAccessManagement, - enabled: (_, _2) => true, - }, - }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, - { - component: SidebarSiblingsSection, - display: { - visible: (_, dataset: GetDatasetQuery) => - (dataset?.dataset?.siblings?.siblings?.length || 0) > 0, - }, - }, - { - component: SidebarViewDefinitionSection, - display: { - visible: (_, dataset: GetDatasetQuery) => - (dataset?.dataset?.viewProperties?.logic && true) || false, + enabled: (_, dataset: GetDatasetQuery) => { + const accessAspect = dataset?.dataset?.access; + const rolesList = accessAspect?.roles; + return !!accessAspect && !!rolesList && rolesList.length > 0; + }, }, }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, + name: 'Incidents', + component: IncidentTab, + getDynamicName: (_, dataset) => { + const activeIncidentCount = dataset?.dataset?.activeIncidents.total; + return `Incidents${(activeIncidentCount && ` (${activeIncidentCount})`) || ''}`; }, }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, - // TODO: Add back once entity-level recommendations are complete. 
- // { - // component: SidebarRecommendationsSection, - // }, ]} + sidebarSections={this.getSidebarSections()} + isNameEditable /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarSiblingsSection, + display: { + visible: (_, dataset: GetDatasetQuery) => (dataset?.dataset?.siblings?.siblings?.length || 0) > 0, + }, + }, + { + component: SidebarViewDefinitionSection, + display: { + visible: (_, dataset: GetDatasetQuery) => !!dataset?.dataset?.viewProperties?.logic, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, // TODO: Add back once entity-level recommendations are complete. + // { + // component: SidebarRecommendationsSection, + // }, + ]; + getOverridePropertiesFromEntity = (dataset?: Dataset | null): GenericEntityProperties => { // if dataset has subTypes filled out, pick the most specific subtype and return it const subTypes = dataset?.subTypes; @@ -254,7 +284,7 @@ export class DatasetEntity implements Entity { return ( { return ( { rowCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].rowCount} columnCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].columnCount} sizeInBytes={(data as any).lastProfile?.length && (data as any).lastProfile[0].sizeInBytes} - lastUpdatedMs={ - (data as any).lastOperation?.length && (data as any).lastOperation[0].lastUpdatedTimestamp - } + lastUpdatedMs={getLastUpdatedMs(data.properties, (data as any)?.lastOperation)} health={data.health} degree={(result as any).degree} paths={(result as any).paths} @@ -334,7 +362,7 @@ export class DatasetEntity implements Entity { }; displayName = (data: Dataset) => { - return data?.properties?.name || data.name || data.urn; + return 
data?.editableProperties?.name || data?.properties?.name || data.name || data.urn; }; platformLogoUrl = (data: Dataset) => { @@ -365,7 +393,7 @@ export class DatasetEntity implements Entity { ); diff --git a/datahub-web-react/src/app/entity/dataset/profile/OperationsTab.tsx b/datahub-web-react/src/app/entity/dataset/profile/OperationsTab.tsx index 1ffcb5716daa82..78ec334f071ba4 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/OperationsTab.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/OperationsTab.tsx @@ -1,13 +1,13 @@ import { DeliveredProcedureOutlined } from '@ant-design/icons'; -import { Button, Pagination, Table, Tooltip, Typography } from 'antd'; -import ButtonGroup from 'antd/lib/button/button-group'; +import { Pagination, Table, Tooltip, Typography } from 'antd'; import React, { useState } from 'react'; import styled from 'styled-components'; -import { useGetDatasetRunsQuery } from '../../../../graphql/dataset.generated'; +import { GetDatasetRunsQuery, useGetDatasetRunsQuery } from '../../../../graphql/dataset.generated'; import { DataProcessInstanceRunResultType, DataProcessRunStatus, + EntityType, RelationshipDirection, } from '../../../../types.generated'; import { @@ -18,8 +18,10 @@ import { import { CompactEntityNameList } from '../../../recommendations/renderer/component/CompactEntityNameList'; import { ANTD_GRAY } from '../../shared/constants'; import { useEntityData } from '../../shared/EntityContext'; -import { ReactComponent as LoadingSvg } from '../../../../images/datahub-logo-color-loading_pendulum.svg'; +import LoadingSvg from '../../../../images/datahub-logo-color-loading_pendulum.svg?react'; import { scrollToTop } from '../../../shared/searchUtils'; +import { formatDuration } from '../../../shared/formatDuration'; +import { notEmpty } from '../../shared/utils'; const ExternalUrlLink = styled.a` font-size: 16px; @@ -32,10 +34,6 @@ const PaginationControlContainer = styled.div` text-align: center; `; -const 
ReadWriteButtonGroup = styled(ButtonGroup)` - padding: 12px; -`; - const LoadingText = styled.div` margin-top: 18px; font-size: 12px; @@ -67,6 +65,12 @@ const columns = [ {new Date(Number(value)).toLocaleString()} ), }, + { + title: 'Duration', + dataIndex: 'duration', + key: 'duration', + render: (durationMs: number) => formatDuration(durationMs), + }, { title: 'Run ID', dataIndex: 'name', @@ -129,14 +133,59 @@ const columns = [ const PAGE_SIZE = 20; export const OperationsTab = () => { - const { urn } = useEntityData(); + const { urn, entityData } = useEntityData(); const [page, setPage] = useState(1); - const [direction, setDirection] = useState(RelationshipDirection.Incoming); - const { loading, data } = useGetDatasetRunsQuery({ - variables: { urn, start: (page - 1) * PAGE_SIZE, count: PAGE_SIZE, direction }, + // Fetch data across all siblings. + const allUrns = [urn, ...(entityData?.siblings?.siblings || []).map((sibling) => sibling?.urn).filter(notEmpty)]; + const loadings: boolean[] = []; + const datas: GetDatasetRunsQuery[] = []; + allUrns.forEach((entityUrn) => { + // Because there's a consistent number and order of the urns, + // this usage of a hook within a loop should be safe. + // eslint-disable-next-line react-hooks/rules-of-hooks + const { loading, data } = useGetDatasetRunsQuery({ + variables: { + urn: entityUrn, + start: (page - 1) * PAGE_SIZE, + count: PAGE_SIZE, + direction: RelationshipDirection.Outgoing, + }, + }); + loadings.push(loading); + if (data) { + datas.push(data); + } }); - const runs = data && data?.dataset?.runs?.runs; + + const loading = loadings.some((loadingEntry) => loadingEntry); + + // Merge the runs data from all entities. + // If there's more than one entity contributing to the data, then we can't do pagination. 
+ let canPaginate = true; + let dataRuns: NonNullable['runs'] | undefined; + if (datas.length > 0) { + let numWithRuns = 0; + for (let i = 0; i < datas.length; i++) { + if (datas[i]?.dataset?.runs?.total) { + numWithRuns++; + } + + if (dataRuns && dataRuns.runs) { + dataRuns.runs.push(...(datas[i]?.dataset?.runs?.runs || [])); + dataRuns.total = (dataRuns.total ?? 0) + (datas[i]?.dataset?.runs?.total ?? 0); + } else { + dataRuns = JSON.parse(JSON.stringify(datas[i]?.dataset?.runs)); + } + } + + if (numWithRuns > 1) { + canPaginate = false; + } + } + + // This also sorts the runs data across all entities. + const runs = dataRuns?.runs?.sort((a, b) => (b?.created?.time ?? 0) - (a?.created?.time ?? 0)); const tableData = runs ?.filter((run) => run) @@ -145,33 +194,27 @@ export const OperationsTab = () => { name: run?.name, status: run?.state?.[0]?.status, resultType: run?.state?.[0]?.result?.resultType, + duration: run?.state?.[0]?.durationMillis, inputs: run?.inputs?.relationships.map((relationship) => relationship.entity), outputs: run?.outputs?.relationships.map((relationship) => relationship.entity), externalUrl: run?.externalUrl, parentTemplate: run?.parentTemplate?.relationships?.[0]?.entity, })); + // If the table contains jobs, we need to show the job-related columns. Otherwise we can simplify the table. + const containsJobs = tableData?.some((run) => run.parentTemplate?.type !== EntityType.Dataset); + const simplifiedColumns = containsJobs + ? columns + : columns.filter((column) => !['name', 'inputs', 'outputs'].includes(column.key)); + const onChangePage = (newPage: number) => { scrollToTop(); setPage(newPage); }; + // TODO: Much of this file is duplicated from RunsTab.tsx. We should refactor this to share code. return ( <> - - - - {loading && ( @@ -180,17 +223,19 @@ export const OperationsTab = () => { )} {!loading && ( <> - - - - +
+ {canPaginate && ( + + + + )} )} diff --git a/datahub-web-react/src/app/entity/dataset/profile/__tests__/Schema.test.tsx b/datahub-web-react/src/app/entity/dataset/profile/__tests__/Schema.test.tsx index 8c9994375c6346..2cb155d43d302b 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/__tests__/Schema.test.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/__tests__/Schema.test.tsx @@ -13,13 +13,12 @@ import { mocks } from '../../../../../Mocks'; import { SchemaTab } from '../../../shared/tabs/Dataset/Schema/SchemaTab'; import EntityContext from '../../../shared/EntityContext'; import { EntityType, SchemaMetadata } from '../../../../../types.generated'; +import { SchemaRow } from '../../../shared/tabs/Dataset/Schema/components/SchemaRow'; -jest.mock('virtualizedtableforantd4', () => { - /* eslint-disable-next-line */ - const { SchemaRow } = require('../../../shared/tabs/Dataset/Schema/components/SchemaRow'); +vi.mock('virtualizedtableforantd4', async () => { return { - ...jest.requireActual('virtualizedtableforantd4'), - useVT: () => [{ body: { row: SchemaRow } }, jest.fn()], + ...(await vi.importActual('virtualizedtableforantd4')), + useVT: () => [{ body: { row: SchemaRow } }, vi.fn()], }; }); @@ -33,13 +32,15 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', schemaMetadata: sampleSchema as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -62,13 +63,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchema as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), 
- routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -99,13 +105,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchemaWithTags as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -126,13 +137,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchemaWithTags as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -152,13 +168,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchemaWithTags as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -178,13 +199,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: 
sampleSchemaWithPkFk as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -204,13 +230,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchemaWithPkFk as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -236,13 +267,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchemaWithKeyValueFields as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -275,13 +311,18 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchemaWithoutFields as SchemaMetadata, }, baseEntity: {}, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > @@ -327,7 +368,9 @@ describe('Schema', () => { urn: 'urn:li:dataset:123', entityType: EntityType.Dataset, entityData: { - description: 
'This is a description', + properties: { + description: 'This is a description', + }, schemaMetadata: sampleSchema as SchemaMetadata, }, baseEntity: { @@ -336,9 +379,12 @@ describe('Schema', () => { usageStats, }, }, - updateEntity: jest.fn(), - routeToTab: jest.fn(), - refetch: jest.fn(), + updateEntity: vi.fn(), + routeToTab: vi.fn(), + refetch: vi.fn(), + loading: true, + lineage: undefined, + dataNotCombinedWithSiblings: null, }} > diff --git a/datahub-web-react/src/app/entity/dataset/profile/__tests__/SchemaDescriptionField.test.tsx b/datahub-web-react/src/app/entity/dataset/profile/__tests__/SchemaDescriptionField.test.tsx index a7878b6a851941..3b3068e45e630e 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/__tests__/SchemaDescriptionField.test.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/__tests__/SchemaDescriptionField.test.tsx @@ -68,7 +68,7 @@ describe('SchemaDescriptionField', () => { const longDescription = 'really long description over 80 characters, really long description over 80 characters, really long description over 80 characters, really long description over 80 characters, really long description over 80 characters'; it('renders longer messages with show more when not expanded', () => { - const onClick = jest.fn(); + const onClick = vi.fn(); const { getByText, queryByText } = render( { }); it('renders longer messages with show less when expanded', () => { - const onClick = jest.fn(); + const onClick = vi.fn(); const { getByText } = render( { - it('should render last observed text if lastObserved is not null', () => { - const { getByText, queryByText } = render(); - expect(getByText(`Last observed ${toRelativeTimeString(123)}`)).toBeInTheDocument(); - expect(queryByText(`Reported ${toRelativeTimeString(123)}`)).toBeNull(); - }); - - it('should render last updated text if lastObserved is null', () => { - const { getByText, queryByText } = render(); - expect(queryByText(`Last observed 
${toRelativeTimeString(123)}`)).toBeNull(); - expect(getByText(`Reported ${toRelativeTimeString(123)}`)).toBeInTheDocument(); - }); - - it('should return null if lastUpdated and lastObserved are both null', () => { - const { container } = render(); - expect(container.firstChild).toBeNull(); - }); -}); diff --git a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx index 1d4f155f797e06..ce8d03fbdc9602 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaDescriptionField.tsx @@ -11,6 +11,7 @@ import SchemaEditableContext from '../../../../../shared/SchemaEditableContext'; import { useEntityData } from '../../../../shared/EntityContext'; import analytics, { EventType, EntityActionType } from '../../../../../analytics'; import { Editor } from '../../../../shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../../../../shared/constants'; const EditIcon = styled(EditOutlined)` cursor: pointer; @@ -77,32 +78,57 @@ const StyledViewer = styled(Editor)` } `; +const AttributeDescription = styled.div` + margin-top: 8px; + color: ${ANTD_GRAY[7]}; +`; + +const StyledAttributeViewer = styled(Editor)` + padding-right: 8px; + display: block; + .remirror-editor.ProseMirror { + padding: 0; + color: ${ANTD_GRAY[7]}; + } +`; + type Props = { onExpanded: (expanded: boolean) => void; + onBAExpanded?: (expanded: boolean) => void; expanded: boolean; + baExpanded?: boolean; description: string; original?: string | null; onUpdate: ( description: string, ) => Promise, Record> | void>; isEdited?: boolean; + isReadOnly?: boolean; + businessAttributeDescription?: string; }; const ABBREVIATED_LIMIT = 80; export default function DescriptionField({ expanded, + baExpanded, onExpanded: handleExpanded, + 
onBAExpanded: handleBAExpanded, description, onUpdate, isEdited = false, original, + isReadOnly, + businessAttributeDescription, }: Props) { const [showAddModal, setShowAddModal] = useState(false); const overLimit = removeMarkdown(description).length > 80; - const isSchemaEditable = React.useContext(SchemaEditableContext); + const isSchemaEditable = React.useContext(SchemaEditableContext) && !isReadOnly; const onCloseModal = () => setShowAddModal(false); const { urn, entityType } = useEntityData(); + const attributeDescriptionOverLimit = businessAttributeDescription + ? removeMarkdown(businessAttributeDescription).length > 80 + : false; const sendAnalytics = () => { analytics.event({ @@ -140,11 +166,12 @@ export default function DescriptionField({ {expanded || !overLimit ? ( <> {!!description && } - {!!description && ( + {!!description && (EditButton || overLimit) && ( {overLimit && ( { + onClick={(e) => { + e.stopPropagation(); handleExpanded(false); }} > @@ -162,7 +189,8 @@ export default function DescriptionField({ readMore={ <> { + onClick={(e) => { + e.stopPropagation(); handleExpanded(true); }} > @@ -177,7 +205,7 @@ export default function DescriptionField({ )} - {isSchemaEditable && isEdited && (edited)} + {isEdited && (edited)} {showAddModal && (
)} + + {baExpanded || !attributeDescriptionOverLimit ? ( + <> + {!!businessAttributeDescription && ( + + )} + {!!businessAttributeDescription && ( + + {attributeDescriptionOverLimit && ( + { + e.stopPropagation(); + if (handleBAExpanded) { + handleBAExpanded(false); + } + }} + > + Read Less + + )} + + )} + + ) : ( + <> + + { + e.stopPropagation(); + if (handleBAExpanded) { + handleBAExpanded(true); + } + }} + > + Read More + + + } + shouldWrap + > + {businessAttributeDescription} + + + )} + ); } diff --git a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx index 9e9e0ede2a1cef..2fc8fc11cd1b29 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/schema/components/SchemaHeader.tsx @@ -17,7 +17,6 @@ import { SemanticVersionStruct } from '../../../../../../types.generated'; import { toRelativeTimeString } from '../../../../../shared/time/timeUtils'; import { ANTD_GRAY, REDESIGN_COLORS } from '../../../../shared/constants'; import { navigateToVersionedDatasetUrl } from '../../../../shared/tabs/Dataset/Schema/utils/navigateToVersionedDatasetUrl'; -import SchemaTimeStamps from './SchemaTimeStamps'; import getSchemaFilterFromQueryString from '../../../../shared/tabs/Dataset/Schema/utils/getSchemaFilterFromQueryString'; const SchemaHeaderContainer = styled.div` @@ -137,8 +136,6 @@ type Props = { hasKeySchema: boolean; showKeySchema: boolean; setShowKeySchema: (show: boolean) => void; - lastUpdated?: number | null; - lastObserved?: number | null; selectedVersion: string; versionList: Array; showSchemaAuditView: boolean; @@ -158,8 +155,6 @@ export default function SchemaHeader({ hasKeySchema, showKeySchema, setShowKeySchema, - lastUpdated, - lastObserved, selectedVersion, versionList, showSchemaAuditView, @@ -255,7 +250,6 @@ export default function 
SchemaHeader({ )} - - {lastObserved && ( - Last observed on {toLocalDateTimeString(lastObserved)}. - )} - {lastUpdated &&
First reported on {toLocalDateTimeString(lastUpdated)}.
} - - } - > - - {lastObserved && ( - - Last observed {toRelativeTimeString(lastObserved)} - - )} - {!lastObserved && lastUpdated && ( - - - Reported {toRelativeTimeString(lastUpdated)} - - )} - - - ); -} - -export default SchemaTimeStamps; diff --git a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx index 36b7d251950b4c..bbac8ecd2c61ee 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx +++ b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx @@ -3,8 +3,9 @@ import { DatasetStatsSummary as DatasetStatsSummaryObj } from '../../../../../.. import { useBaseEntity } from '../../../../shared/EntityContext'; import { GetDatasetQuery } from '../../../../../../graphql/dataset.generated'; import { DatasetStatsSummary } from '../../../shared/DatasetStatsSummary'; +import { getLastUpdatedMs } from '../../../shared/utils'; -export const DatasetStatsSummarySubHeader = () => { +export const DatasetStatsSummarySubHeader = ({ properties }: { properties?: any }) => { const result = useBaseEntity(); const dataset = result?.dataset; @@ -13,15 +14,13 @@ export const DatasetStatsSummarySubHeader = () => { const maybeLastProfile = dataset?.datasetProfiles && dataset.datasetProfiles.length ? dataset.datasetProfiles[0] : undefined; - const maybeLastOperation = dataset?.operations && dataset.operations.length ? 
dataset.operations[0] : undefined; - const rowCount = maybeLastProfile?.rowCount; const columnCount = maybeLastProfile?.columnCount; const sizeInBytes = maybeLastProfile?.sizeInBytes; const totalSqlQueries = dataset?.usageStats?.aggregations?.totalSqlQueries; const queryCountLast30Days = maybeStatsSummary?.queryCountLast30Days; const uniqueUserCountLast30Days = maybeStatsSummary?.uniqueUserCountLast30Days; - const lastUpdatedMs = maybeLastOperation?.lastUpdatedTimestamp; + const lastUpdatedMs = getLastUpdatedMs(dataset?.properties, dataset?.operations); return ( { queryCountLast30Days={queryCountLast30Days} uniqueUserCountLast30Days={uniqueUserCountLast30Days} lastUpdatedMs={lastUpdatedMs} + shouldWrap={properties?.shouldWrap} /> ); }; diff --git a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx index 14f550de25be76..8e9102145cfa53 100644 --- a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx +++ b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx @@ -12,6 +12,8 @@ import ExpandingStat from './ExpandingStat'; const StatText = styled.span<{ color: string }>` color: ${(props) => props.color}; + @media (min-width: 1160px) { + white-space: nowrap; `; const PopoverContent = styled.div` @@ -28,6 +30,7 @@ type Props = { lastUpdatedMs?: number | null; color?: string; mode?: 'normal' | 'tooltip-content'; + shouldWrap?: boolean; }; export const DatasetStatsSummary = ({ @@ -40,6 +43,7 @@ export const DatasetStatsSummary = ({ lastUpdatedMs, color, mode = 'normal', + shouldWrap, }: Props) => { const isTooltipMode = mode === 'tooltip-content'; const displayedColor = isTooltipMode ? '' : color ?? 
ANTD_GRAY[7]; @@ -103,5 +107,5 @@ export const DatasetStatsSummary = ({ ), ].filter((stat) => stat); - return <>{statsViews.length > 0 && }; + return <>{statsViews.length > 0 && }; }; diff --git a/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx b/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx index 8101a696bf274e..4e223b6e540588 100644 --- a/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx +++ b/datahub-web-react/src/app/entity/dataset/shared/ExpandingStat.tsx @@ -2,9 +2,7 @@ import React, { ReactNode, useEffect, useRef, useState } from 'react'; import styled from 'styled-components'; const ExpandingStatContainer = styled.span<{ disabled: boolean; expanded: boolean; width: string }>` - overflow: hidden; - white-space: nowrap; - width: ${(props) => props.width}; + max-width: 100%; transition: width 250ms ease; `; @@ -13,6 +11,7 @@ const ExpandingStat = ({ render, }: { disabled?: boolean; + render: (isExpanded: boolean) => ReactNode; }) => { const contentRef = useRef(null); diff --git a/datahub-web-react/src/app/entity/dataset/shared/utils.ts b/datahub-web-react/src/app/entity/dataset/shared/utils.ts new file mode 100644 index 00000000000000..fedd54385e7ab4 --- /dev/null +++ b/datahub-web-react/src/app/entity/dataset/shared/utils.ts @@ -0,0 +1,13 @@ +import { DatasetProperties, Operation } from '../../../../types.generated'; + +export function getLastUpdatedMs( + properties: Pick | null | undefined, + operations: Pick[] | null | undefined, +): number | undefined { + return ( + Math.max( + properties?.lastModified?.time || 0, + (operations?.length && operations[0].lastUpdatedTimestamp) || 0, + ) || undefined + ); +} diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx index 2d82521a90df58..a2347928136e53 100644 --- 
a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx @@ -32,6 +32,7 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on variables: { input: { domainUrn: domain.urn, + id: builderState.id, properties: { name: builderState.name, description: builderState.description || undefined, @@ -49,10 +50,10 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on onClose(); } }) - .catch(() => { + .catch((error) => { onClose(); message.destroy(); - message.error({ content: 'Failed to create Data Product. An unexpected error occurred' }); + message.error({ content: `Failed to create Data Product: ${error.message}.` }); }); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx new file mode 100644 index 00000000000000..c3952fbaf5cb0d --- /dev/null +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx @@ -0,0 +1,66 @@ +import React from 'react'; +import { Collapse, Form, Input, Typography } from 'antd'; +import styled from 'styled-components'; +import { validateCustomUrnId } from '../../../shared/textUtil'; +import { DataProductBuilderFormProps } from './types'; + +const FormItem = styled(Form.Item)` + .ant-form-item-label { + padding-bottom: 2px; + } +`; + +const FormItemWithMargin = styled(FormItem)` + margin-bottom: 16px; +`; + +const FormItemNoMargin = styled(FormItem)` + margin-bottom: 0; +`; + +const AdvancedLabel = styled(Typography.Text)` + color: #373d44; +`; + +export function DataProductAdvancedOption({ builderState, updateBuilderState }: DataProductBuilderFormProps) { + function updateDataProductId(id: string) { + updateBuilderState({ + ...builderState, + id, + }); + } + + return ( + + Advanced Options} key="1"> + Data 
Product Id} + help="By default, a random UUID will be generated to uniquely identify this data product. If + you'd like to provide a custom id instead to more easily keep track of this data product, + you may provide it here. Be careful, you cannot easily change the data product id after + creation." + > + ({ + validator(_, value) { + if (value && validateCustomUrnId(value)) { + return Promise.resolve(); + } + return Promise.reject(new Error('Please enter a valid Data product id')); + }, + }), + ]} + > + updateDataProductId(e.target.value)} + /> + + + + + ); +} diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx index b5a27a6e1b8766..695267080c92fb 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx @@ -3,18 +3,14 @@ import React from 'react'; import styled from 'styled-components'; import { Editor as MarkdownEditor } from '../../shared/tabs/Documentation/components/editor/Editor'; import { ANTD_GRAY } from '../../shared/constants'; -import { DataProductBuilderState } from './types'; +import { DataProductBuilderFormProps } from './types'; +import { DataProductAdvancedOption } from './DataProductAdvancedOption'; const StyledEditor = styled(MarkdownEditor)` border: 1px solid ${ANTD_GRAY[4]}; `; -type Props = { - builderState: DataProductBuilderState; - updateBuilderState: (newState: DataProductBuilderState) => void; -}; - -export default function DataProductBuilderForm({ builderState, updateBuilderState }: Props) { +export default function DataProductBuilderForm({ builderState, updateBuilderState }: DataProductBuilderFormProps) { function updateName(name: string) { updateBuilderState({ ...builderState, @@ -47,6 +43,7 @@ export default function DataProductBuilderForm({ builderState, updateBuilderStat 
Description}> + ); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts index 1ed3ede39cfbe4..2015b97f1433b7 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts @@ -1,4 +1,10 @@ export type DataProductBuilderState = { name: string; + id?: string; description?: string; }; + +export type DataProductBuilderFormProps = { + builderState: DataProductBuilderState; + updateBuilderState: (newState: DataProductBuilderState) => void; +}; diff --git a/datahub-web-react/src/app/entity/domain/DomainEntity.tsx b/datahub-web-react/src/app/entity/domain/DomainEntity.tsx index 68c06935dbbe57..2b67c88a6ff235 100644 --- a/datahub-web-react/src/app/entity/domain/DomainEntity.tsx +++ b/datahub-web-react/src/app/entity/domain/DomainEntity.tsx @@ -14,6 +14,7 @@ import { EntityActionItem } from '../shared/entity/EntityActions'; import DataProductsTab from './DataProductsTab/DataProductsTab'; import { EntityProfileTab } from '../shared/constants'; import DomainIcon from '../../domain/DomainIcon'; +import { PropertiesTab } from '../shared/tabs/Properties/PropertiesTab'; /** * Definition of the DataHub Domain entity. 
@@ -65,6 +66,8 @@ export class DomainEntity implements Entity { getCollectionName = () => 'Domains'; + useEntityQuery = useGetDomainQuery; + renderProfile = (urn: string) => ( { name: 'Data Products', component: DataProductsTab, }, - ]} - sidebarSections={[ { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, + name: 'Properties', + component: PropertiesTab, }, ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + }, + ]; + renderPreview = (_: PreviewType, data: Domain) => { return ( { + type: EntityType = EntityType.ErModelRelationship; + + icon = (fontSize: number, styleType: IconStyleType) => { + if (styleType === IconStyleType.TAB_VIEW) { + return ; + } + + if (styleType === IconStyleType.HIGHLIGHT) { + return ; + } + + if (styleType === IconStyleType.SVG) { + return ( + + ); + } + + return ; + }; + + isSearchEnabled = () => true; + + isBrowseEnabled = () => false; + + isLineageEnabled = () => false; + + getAutoCompleteFieldName = () => 'name'; + + getPathName = () => 'erModelRelationship'; + + getCollectionName = () => ''; + + getEntityName = () => 'ER-Model-Relationship'; + + renderProfile = (urn: string) => ( + + ); + + getOverridePropertiesFromEntity = (_ermodelrelation?: ErModelRelationship | null): GenericEntityProperties => { + return {}; + }; + + renderPreview = (_: PreviewType, data: ErModelRelationship) => { + return ( + <> + + {data.properties?.name || data.editableProperties?.name || ''} + + } + description={data?.editableProperties?.description || ''} + owners={data.ownership?.owners} + glossaryTerms={data?.glossaryTerms || undefined} + globalTags={data?.tags} + /> + + ); + }; + + renderSearch = (result: SearchResult) => { + return this.renderPreview(PreviewType.SEARCH, result.entity as ErModelRelationship); + }; + + displayName = (data: ErModelRelationship) => { + return data.properties?.name || 
data.editableProperties?.name || data.urn; + }; + + getGenericEntityProperties = (data: ErModelRelationship) => { + return getDataForEntityType({ + data, + entityType: this.type, + getOverrideProperties: this.getOverridePropertiesFromEntity, + }); + }; + + supportedCapabilities = () => { + return new Set([]); + }; +} diff --git a/datahub-web-react/src/app/entity/ermodelrelationships/preview/ERModelRelationshipAction.less b/datahub-web-react/src/app/entity/ermodelrelationships/preview/ERModelRelationshipAction.less new file mode 100644 index 00000000000000..41e201585f3b8e --- /dev/null +++ b/datahub-web-react/src/app/entity/ermodelrelationships/preview/ERModelRelationshipAction.less @@ -0,0 +1,10 @@ +.joinName { + width: 385px; + height: 24px; + font-style: normal; + font-weight: 700; + font-size: 16px; + line-height: 24px; + align-items: center; + color: #262626; +} diff --git a/datahub-web-react/src/app/entity/ermodelrelationships/preview/ERModelRelationshipPreviewCard.tsx b/datahub-web-react/src/app/entity/ermodelrelationships/preview/ERModelRelationshipPreviewCard.tsx new file mode 100644 index 00000000000000..715f935685d545 --- /dev/null +++ b/datahub-web-react/src/app/entity/ermodelrelationships/preview/ERModelRelationshipPreviewCard.tsx @@ -0,0 +1,56 @@ +import React from 'react'; +import { Card, Collapse } from 'antd'; +import ermodelrelationshipIcon from '../../../../images/ermodelrelationshipIcon.svg'; +import { EntityType, Owner, GlobalTags, GlossaryTerms } from '../../../../types.generated'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import DefaultPreviewCard from '../../../preview/DefaultPreviewCard'; +import { IconStyleType } from '../../Entity'; + +const { Panel } = Collapse; + +export const ERModelRelationshipPreviewCard = ({ + urn, + name, + owners, + description, + globalTags, + glossaryTerms, +}: { + urn: string; + name: string | any; + description: string | any; + globalTags?: GlobalTags | null; + glossaryTerms?: 
GlossaryTerms | null; + owners?: Array | null; +}): JSX.Element => { + const entityRegistry = useEntityRegistry(); + const getERModelRelationHeader = (): JSX.Element => { + return ( +
+ + } + tags={globalTags || undefined} + glossaryTerms={glossaryTerms || undefined} + owners={owners} + type="ERModelRelationship" + typeIcon={entityRegistry.getIcon(EntityType.ErModelRelationship, 14, IconStyleType.ACCENT)} + titleSizePx={18} + /> +
+ ); + }; + + return ( + <> + + + + + ); +}; diff --git a/datahub-web-react/src/app/entity/glossaryNode/GlossaryNodeEntity.tsx b/datahub-web-react/src/app/entity/glossaryNode/GlossaryNodeEntity.tsx index 486088b40a587f..803c703b7f4c55 100644 --- a/datahub-web-react/src/app/entity/glossaryNode/GlossaryNodeEntity.tsx +++ b/datahub-web-react/src/app/entity/glossaryNode/GlossaryNodeEntity.tsx @@ -11,6 +11,7 @@ import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; import { DocumentationTab } from '../shared/tabs/Documentation/DocumentationTab'; import ChildrenTab from './ChildrenTab'; import { Preview } from './preview/Preview'; +import { PropertiesTab } from '../shared/tabs/Properties/PropertiesTab'; class GlossaryNodeEntity implements Entity { type: EntityType = EntityType.GlossaryNode; @@ -48,6 +49,8 @@ class GlossaryNodeEntity implements Entity { getEntityName = () => 'Term Group'; + useEntityQuery = useGetGlossaryNodeQuery; + renderProfile = (urn: string) => { return ( { hideLinksButton: true, }, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - properties: { - hideLinksButton: true, - }, - }, { - component: SidebarOwnerSection, + name: 'Properties', + component: PropertiesTab, }, ]} + sidebarSections={this.getSidebarSections()} headerDropdownItems={ new Set([ EntityMenuItems.ADD_TERM_GROUP, @@ -93,6 +90,18 @@ class GlossaryNodeEntity implements Entity { ); }; + getSidebarSections = () => [ + { + component: SidebarAboutSection, + properties: { + hideLinksButton: true, + }, + }, + { + component: SidebarOwnerSection, + }, + ]; + displayName = (data: GlossaryNode) => { return data.properties?.name || data.urn; }; diff --git a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx index 080ee5889aec92..8bbc0a693b2231 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx +++ 
b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx @@ -17,6 +17,7 @@ import { SidebarAboutSection } from '../shared/containers/profile/sidebar/AboutS import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; import { EntityActionItem } from '../shared/entity/EntityActions'; import { SidebarDomainSection } from '../shared/containers/profile/sidebar/Domain/SidebarDomainSection'; +import { PageRoutes } from '../../../conf/Global'; /** * Definition of the DataHub Dataset entity. @@ -57,6 +58,10 @@ export class GlossaryTermEntity implements Entity { getEntityName = () => 'Glossary Term'; + useEntityQuery = useGetGlossaryTermQuery; + + getCustomCardUrlPath = () => PageRoutes.GLOSSARY; + renderProfile = (urn) => { return ( { useEntityQuery={useGetGlossaryTermQuery as any} headerActionItems={new Set([EntityActionItem.BATCH_ADD_GLOSSARY_TERM])} headerDropdownItems={ - new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.MOVE, EntityMenuItems.DELETE]) + new Set([ + EntityMenuItems.UPDATE_DEPRECATION, + EntityMenuItems.CLONE, + EntityMenuItems.MOVE, + EntityMenuItems.DELETE, + ]) } isNameEditable hideBrowseBar @@ -100,25 +110,27 @@ export class GlossaryTermEntity implements Entity { component: PropertiesTab, }, ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - }, - { - component: SidebarDomainSection, - properties: { - hideOwnerType: true, - }, - }, - ]} + sidebarSections={this.getSidebarSections()} getOverrideProperties={this.getOverridePropertiesFromEntity} /> ); }; + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + }, + { + component: SidebarDomainSection, + properties: { + hideOwnerType: true, + }, + }, + ]; + getOverridePropertiesFromEntity = (glossaryTerm?: GlossaryTerm | null): GenericEntityProperties => { // if dataset has subTypes filled out, pick the most specific subtype and return it return { diff 
--git a/datahub-web-react/src/app/entity/glossaryTerm/preview/__tests__/Preview.test.tsx b/datahub-web-react/src/app/entity/glossaryTerm/preview/__tests__/Preview.test.tsx index 35b8675c71b7ad..90aa0238c1cb45 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/preview/__tests__/Preview.test.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/preview/__tests__/Preview.test.tsx @@ -4,6 +4,7 @@ import React from 'react'; import { mocks } from '../../../../../Mocks'; import TestPageContainer from '../../../../../utils/test-utils/TestPageContainer'; import { Preview } from '../Preview'; +import { PreviewType } from '../../../Entity'; describe('Preview', () => { it('renders', () => { @@ -15,6 +16,7 @@ describe('Preview', () => { name="name" description="definition" owners={null} + previewType={PreviewType.PREVIEW} /> , diff --git a/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx index 5b303f75e2985a..f97f3c327676b9 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/profile/AddRelatedTermsModal.tsx @@ -10,11 +10,19 @@ import { BrowserWrapper } from '../../../shared/tags/AddTagsTermsModal'; import TermLabel from '../../../shared/TermLabel'; import { useEntityRegistry } from '../../../useEntityRegistry'; import { useEntityData, useRefetch } from '../../shared/EntityContext'; +import ParentEntities from '../../../search/filters/ParentEntities'; +import { getParentEntities } from '../../../search/filters/utils'; const StyledSelect = styled(Select)` width: 480px; `; +const SearchResultContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; +`; + interface Props { onClose: () => void; relationshipType: TermRelationshipType; @@ -68,7 +76,10 @@ function AddRelatedTermsModal(props: Props) { return ( - + + + + ); }); diff --git 
a/datahub-web-react/src/app/entity/glossaryTerm/profile/__tests__/GlossaryTermHeader.test.tsx b/datahub-web-react/src/app/entity/glossaryTerm/profile/__tests__/GlossaryTermHeader.test.tsx index 192e66285e6093..0dfc088143baf5 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/profile/__tests__/GlossaryTermHeader.test.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/profile/__tests__/GlossaryTermHeader.test.tsx @@ -7,7 +7,7 @@ import GlossaryTermHeader from '../GlossaryTermHeader'; const glossaryTermHeaderData = { definition: 'this is sample definition', - termSource: 'termSource', + sourceUrl: 'sourceUrl', sourceRef: 'Source ref', fqdn: 'fqdn', }; @@ -19,9 +19,9 @@ describe('Glossary Term Header', () => { , diff --git a/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx new file mode 100644 index 00000000000000..356daf584d9f7e --- /dev/null +++ b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx @@ -0,0 +1,63 @@ +import React, { useState } from 'react'; +import { Button, Modal, Form } from 'antd'; +import styled from 'styled-components'; + +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../shared/constants'; + +type Props = { + onClose: () => void; + onSaveAboutMe: () => void; + setStagedDescription: (des: string) => void; + stagedDescription: string | undefined; +}; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4]}; +`; + +export default function EditGroupDescriptionModal({ + onClose, + onSaveAboutMe, + setStagedDescription, + stagedDescription, +}: Props) { + const [form] = Form.useForm(); + const [aboutText, setAboutText] = useState(stagedDescription); + + function updateDescription(description: string) { + setAboutText(aboutText); + setStagedDescription(description); + } + + const saveDescription = () => { + onSaveAboutMe(); + onClose(); + }; + + return ( + 
+ + + + } + > +
+ +
+ +
+
+ +
+ ); +} diff --git a/datahub-web-react/src/app/entity/group/GroupEditModal.tsx b/datahub-web-react/src/app/entity/group/GroupEditModal.tsx index 9db52c7598d1e8..be1289ad3202b3 100644 --- a/datahub-web-react/src/app/entity/group/GroupEditModal.tsx +++ b/datahub-web-react/src/app/entity/group/GroupEditModal.tsx @@ -7,6 +7,7 @@ type PropsData = { email: string | undefined; slack: string | undefined; urn: string | undefined; + photoUrl: string | undefined; }; type Props = { @@ -27,6 +28,7 @@ export default function GroupEditModal({ visible, onClose, onSave, editModalData slack: editModalData.slack, email: editModalData.email, urn: editModalData.urn, + photoUrl: editModalData.photoUrl, }); useEffect(() => { @@ -41,6 +43,7 @@ export default function GroupEditModal({ visible, onClose, onSave, editModalData input: { email: data.email, slack: data.slack, + pictureLink: data.photoUrl, }, }, }) @@ -55,6 +58,7 @@ export default function GroupEditModal({ visible, onClose, onSave, editModalData email: '', slack: '', urn: '', + photoUrl: '', }); }) .catch((e) => { @@ -125,6 +129,19 @@ export default function GroupEditModal({ visible, onClose, onSave, editModalData onChange={(event) => setData({ ...data, slack: event.target.value })} /> + + Image URL} + rules={[{ whitespace: true }, { type: 'url', message: 'not valid url' }]} + hasFeedback + > + setData({ ...data, photoUrl: event.target.value })} + /> + ); diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx index d9eaed2682ea19..4cba8499877f1c 100644 --- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx +++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx @@ -16,14 +16,16 @@ import { EmptyValue, SocialDetails, EditButton, - AboutSection, - AboutSectionText, GroupsSection, + AboutSection, } from '../shared/SidebarStyledComponents'; import GroupMembersSideBarSection from './GroupMembersSideBarSection'; import { useUserContext 
} from '../../context/useUserContext'; - -const { Paragraph } = Typography; +import { useBrowserTitle } from '../../shared/BrowserTabTitleContext'; +import StripMarkdownText, { removeMarkdown } from '../shared/components/styled/StripMarkdownText'; +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import EditGroupDescriptionModal from './EditGroupDescriptionModal'; +import { REDESIGN_COLORS } from '../shared/constants'; type SideBarData = { photoUrl: string | undefined; @@ -80,6 +82,61 @@ const GroupTitle = styled(Typography.Title)` } `; +const EditIcon = styled(EditOutlined)` + cursor: pointer; + color: ${REDESIGN_COLORS.BLUE}; +`; +const AddNewDescription = styled(Button)` + display: none; + margin: -4px; + width: 140px; +`; + +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + text-align:left; + font-weight: normal; + font + min-height: 22px; + + &:hover ${AddNewDescription} { + display: block; + } + & ins.diff { + background-color: #b7eb8f99; + text-decoration: none; + &:hover { + background-color: #b7eb8faa; + } + } + & del.diff { + background-color: #ffa39e99; + text-decoration: line-through; + &: hover { + background-color: #ffa39eaa; + } + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; + /** * Responsible for reading & writing users. 
*/ @@ -101,12 +158,39 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { const { url } = useRouteMatch(); const history = useHistory(); + const { updateTitle } = useBrowserTitle(); + + useEffect(() => { + // You can use the title and updateTitle function here + // For example, updating the title when the component mounts + if (name) { + updateTitle(`Group | ${name}`); + } + // // Don't forget to clean up the title when the component unmounts + return () => { + if (name) { + // added to condition for rerendering issue + updateTitle(''); + } + }; + }, [name, updateTitle]); + /* eslint-disable @typescript-eslint/no-unused-vars */ const [editGroupModal, showEditGroupModal] = useState(false); const me = useUserContext(); const canEditGroup = me?.platformPrivileges?.manageIdentities; const [groupTitle, setGroupTitle] = useState(name); + const [expanded, setExpanded] = useState(false); + const [isUpdatingDescription, SetIsUpdatingDescription] = useState(false); + const [stagedDescription, setStagedDescription] = useState(aboutText); + const [updateName] = useUpdateNameMutation(); + const overLimit = removeMarkdown(aboutText || '').length > 80; + const ABBREVIATED_LIMIT = 80; + + useEffect(() => { + setStagedDescription(aboutText); + }, [aboutText]); useEffect(() => { setGroupTitle(groupTitle); @@ -133,15 +217,16 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { urn, email, slack, + photoUrl, }; // About Text save - const onSaveAboutMe = (inputString) => { + const onSaveAboutMe = () => { updateCorpGroupPropertiesMutation({ variables: { urn: urn || '', input: { - description: inputString, + description: stagedDescription, }, }, }) @@ -201,16 +286,65 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { - {TITLES.about} - - - {aboutText || } - - + +
{TITLES.about} + + SetIsUpdatingDescription(true)} data-testid="edit-icon" /> + + + + {(aboutText && expanded) || !overLimit ? ( + <> + {/* Read only viewer for displaying group description */} + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + ) : ( + <> + {/* Display abbreviated description with option to read more */} + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {aboutText} + + + )} + + {/* Modal for updating group description */} + {isUpdatingDescription && ( + { + SetIsUpdatingDescription(false); + setStagedDescription(aboutText); + }} + onSaveAboutMe={onSaveAboutMe} + setStagedDescription={setStagedDescription} + stagedDescription={stagedDescription} + /> + )} diff --git a/datahub-web-react/src/app/entity/group/GroupProfile.tsx b/datahub-web-react/src/app/entity/group/GroupProfile.tsx index 53d2062277dec0..e8001ebccc3b5a 100644 --- a/datahub-web-react/src/app/entity/group/GroupProfile.tsx +++ b/datahub-web-react/src/app/entity/group/GroupProfile.tsx @@ -16,7 +16,7 @@ import NonExistentEntityPage from '../shared/entity/NonExistentEntityPage'; const messageStyle = { marginTop: '10%' }; export enum TabType { - Assets = 'Assets', + Assets = 'Owner Of', Members = 'Members', } @@ -89,7 +89,7 @@ export default function GroupProfile() { // Side bar data const sideBarData = { - photoUrl: undefined, + photoUrl: data?.corpGroup?.editableProperties?.pictureLink || undefined, avatarName: data?.corpGroup?.properties?.displayName || data?.corpGroup?.name || diff --git a/datahub-web-react/src/app/entity/mlFeature/MLFeatureEntity.tsx b/datahub-web-react/src/app/entity/mlFeature/MLFeatureEntity.tsx index a7f586c9108eed..2f2786b1c0d960 100644 --- a/datahub-web-react/src/app/entity/mlFeature/MLFeatureEntity.tsx +++ b/datahub-web-react/src/app/entity/mlFeature/MLFeatureEntity.tsx @@ -17,6 +17,7 @@ import { LineageTab } from '../shared/tabs/Lineage/LineageTab'; import { EntityMenuItems } from 
'../shared/EntityDropdown/EntityDropdown'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; import { getDataProduct } from '../shared/utils'; +import { PropertiesTab } from '../shared/tabs/Properties/PropertiesTab'; /** * Definition of the DataHub MLFeature entity. @@ -57,6 +58,8 @@ export class MLFeatureEntity implements Entity { getCollectionName = () => 'Features'; + useEntityQuery = useGetMlFeatureQuery; + getOverridePropertiesFromEntity = (feature?: MlFeature | null): GenericEntityProperties => { return { // eslint-disable-next-line @@ -85,34 +88,40 @@ export class MLFeatureEntity implements Entity { name: 'Lineage', component: LineageTab, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, + name: 'Properties', + component: PropertiesTab, }, ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderPreview = (_: PreviewType, data: MlFeature) => { const genericProperties = this.getGenericEntityProperties(data); // eslint-disable-next-line diff --git a/datahub-web-react/src/app/entity/mlFeatureTable/MLFeatureTableEntity.tsx b/datahub-web-react/src/app/entity/mlFeatureTable/MLFeatureTableEntity.tsx index b3e509decd29d6..595c73fbc3cb67 100644 --- a/datahub-web-react/src/app/entity/mlFeatureTable/MLFeatureTableEntity.tsx +++ 
b/datahub-web-react/src/app/entity/mlFeatureTable/MLFeatureTableEntity.tsx @@ -63,6 +63,33 @@ export class MLFeatureTableEntity implements Entity { return {}; }; + useEntityQuery = useGetMlFeatureTableQuery; + + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderProfile = (urn: string) => ( { component: DocumentationTab, }, ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, - { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, - ]} + sidebarSections={this.getSidebarSections()} /> ); diff --git a/datahub-web-react/src/app/entity/mlModel/MLModelEntity.tsx b/datahub-web-react/src/app/entity/mlModel/MLModelEntity.tsx index 62690d611dcddc..d4d0b37da9ec96 100644 --- a/datahub-web-react/src/app/entity/mlModel/MLModelEntity.tsx +++ b/datahub-web-react/src/app/entity/mlModel/MLModelEntity.tsx @@ -64,6 +64,33 @@ export class MLModelEntity implements Entity { }; }; + useEntityQuery = useGetMlModelQuery; + + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderProfile = (urn: string) => ( { component: PropertiesTab, }, ]} - sidebarSections={[ - { - component: 
SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, - { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, - }, - ]} + sidebarSections={this.getSidebarSections()} /> ); diff --git a/datahub-web-react/src/app/entity/mlModelGroup/MLModelGroupEntity.tsx b/datahub-web-react/src/app/entity/mlModelGroup/MLModelGroupEntity.tsx index 7adc7a6ee7e63c..5896c1864cc435 100644 --- a/datahub-web-react/src/app/entity/mlModelGroup/MLModelGroupEntity.tsx +++ b/datahub-web-react/src/app/entity/mlModelGroup/MLModelGroupEntity.tsx @@ -15,6 +15,7 @@ import ModelGroupModels from './profile/ModelGroupModels'; import { DocumentationTab } from '../shared/tabs/Documentation/DocumentationTab'; import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; +import { PropertiesTab } from '../shared/tabs/Properties/PropertiesTab'; /** * Definition of the DataHub MlModelGroup entity. 
@@ -59,6 +60,33 @@ export class MLModelGroupEntity implements Entity { return {}; }; + useEntityQuery = useGetMlModelGroupQuery; + + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderProfile = (urn: string) => ( { name: 'Documentation', component: DocumentationTab, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, - { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, { - component: DataProductSection, + name: 'Properties', + component: PropertiesTab, }, ]} + sidebarSections={this.getSidebarSections()} /> ); diff --git a/datahub-web-react/src/app/entity/mlPrimaryKey/MLPrimaryKeyEntity.tsx b/datahub-web-react/src/app/entity/mlPrimaryKey/MLPrimaryKeyEntity.tsx index 2549f4f6a00471..60c7531a4f57cc 100644 --- a/datahub-web-react/src/app/entity/mlPrimaryKey/MLPrimaryKeyEntity.tsx +++ b/datahub-web-react/src/app/entity/mlPrimaryKey/MLPrimaryKeyEntity.tsx @@ -16,6 +16,7 @@ import { SidebarOwnerSection } from '../shared/containers/profile/sidebar/Owners import { LineageTab } from '../shared/tabs/Lineage/LineageTab'; import DataProductSection from '../shared/containers/profile/sidebar/DataProduct/DataProductSection'; import { getDataProduct } from '../shared/utils'; +import { PropertiesTab } from '../shared/tabs/Properties/PropertiesTab'; /** * Definition of the DataHub MLPrimaryKey entity. 
@@ -63,6 +64,8 @@ export class MLPrimaryKeyEntity implements Entity { }; }; + useEntityQuery = useGetMlPrimaryKeyQuery; + renderProfile = (urn: string) => ( { name: 'Lineage', component: LineageTab, }, - ]} - sidebarSections={[ - { - component: SidebarAboutSection, - }, - { - component: SidebarOwnerSection, - properties: { - defaultOwnerType: OwnershipType.TechnicalOwner, - }, - }, { - component: SidebarTagsSection, - properties: { - hasTags: true, - hasTerms: true, - }, - }, - { - component: SidebarDomainSection, - }, - { - component: DataProductSection, + name: 'Properties', + component: PropertiesTab, }, ]} + sidebarSections={this.getSidebarSections()} /> ); + getSidebarSections = () => [ + { + component: SidebarAboutSection, + }, + { + component: SidebarOwnerSection, + properties: { + defaultOwnerType: OwnershipType.TechnicalOwner, + }, + }, + { + component: SidebarTagsSection, + properties: { + hasTags: true, + hasTerms: true, + }, + }, + { + component: SidebarDomainSection, + }, + { + component: DataProductSection, + }, + ]; + renderPreview = (_: PreviewType, data: MlPrimaryKey) => { const genericProperties = this.getGenericEntityProperties(data); // eslint-disable-next-line diff --git a/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx b/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx index cff10a3d96b301..a3304ab015faa4 100644 --- a/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx +++ b/datahub-web-react/src/app/entity/ownership/ManageOwnership.tsx @@ -6,6 +6,9 @@ import { OwnershipList } from './OwnershipList'; const PageContainer = styled.div` padding-top: 20px; width: 100%; + display: flex; + flex-direction: column; + overflow: auto; `; const PageHeaderContainer = styled.div` @@ -20,7 +23,11 @@ const PageTitle = styled(Typography.Title)` } `; -const ListContainer = styled.div``; +const ListContainer = styled.div` + display: flex; + flex-direction: column; + overflow: auto; +`; /** * Component used for displaying 
the 'Manage Ownership' experience. diff --git a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx index 41e07520a0ece5..cf4bf9a0fddf4c 100644 --- a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx +++ b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { Dropdown, MenuProps, Popconfirm, Typography, message, notification } from 'antd'; -import { DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; +import { CopyOutlined, DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; import styled from 'styled-components/macro'; import { OwnershipTypeEntity } from '../../../../types.generated'; import { useDeleteOwnershipTypeMutation } from '../../../../graphql/ownership.generated'; @@ -48,6 +48,10 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe setOwnershipType(ownershipType); }; + const onCopy = () => { + navigator.clipboard.writeText(ownershipType.urn); + }; + const [deleteOwnershipTypeMutation] = useDeleteOwnershipTypeMutation(); const onDelete = () => { @@ -106,12 +110,23 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe ), }, + { + key: 'copy', + icon: ( + + + Copy Urn + + ), + }, ]; const onClick: MenuProps['onClick'] = (e) => { const key = e.key as string; if (key === 'edit') { editOnClick(); + } else if (key === 'copy') { + onCopy(); } }; diff --git a/datahub-web-react/src/app/entity/restricted/RestrictedEntity.tsx b/datahub-web-react/src/app/entity/restricted/RestrictedEntity.tsx new file mode 100644 index 00000000000000..482709c110d6b9 --- /dev/null +++ b/datahub-web-react/src/app/entity/restricted/RestrictedEntity.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import { QuestionOutlined } from '@ant-design/icons'; +import { EntityType, Restricted, SearchResult } from 
'../../../types.generated'; +import { Entity, IconStyleType, PreviewType } from '../Entity'; +import { getDataForEntityType } from '../shared/containers/profile/utils'; +import RestrictedIcon from '../../../images/restricted.svg'; +import { RestrictedEntityProfile } from './RestrictedEntityProfile'; + +/** + * Definition of the DataHub Data Product entity. + */ +export class RestrictedEntity implements Entity { + type: EntityType = EntityType.Restricted; + + icon = (fontSize: number, styleType: IconStyleType, color?: string) => { + if (styleType === IconStyleType.TAB_VIEW) { + return ; + } + + if (styleType === IconStyleType.HIGHLIGHT) { + return ; + } + + return ( + + ); + }; + + isSearchEnabled = () => false; + + isBrowseEnabled = () => false; + + isLineageEnabled = () => true; + + getAutoCompleteFieldName = () => 'name'; + + getPathName = () => 'restricted'; + + getEntityName = () => 'Restricted'; + + getCollectionName = () => 'Restricted Assets'; + + renderProfile = (_: string) => ; + + renderPreview = (_: PreviewType, _data: Restricted) => { + return ; + }; + + renderSearch = (_result: SearchResult) => { + return ; + }; + + getLineageVizConfig = (entity: Restricted) => { + return { + urn: entity?.urn, + name: 'Restricted Asset', + type: EntityType.Restricted, + icon: RestrictedIcon, + }; + }; + + displayName = (_data: Restricted) => { + return 'Restricted Asset'; + }; + + getOverridePropertiesFromEntity = (_data: Restricted) => { + return {}; + }; + + getGenericEntityProperties = (data: Restricted) => { + return getDataForEntityType({ + data, + entityType: this.type, + getOverrideProperties: this.getOverridePropertiesFromEntity, + }); + }; + + supportedCapabilities = () => { + return new Set([]); + }; + + getGraphName = () => { + return 'restricted'; + }; +} diff --git a/datahub-web-react/src/app/entity/restricted/RestrictedEntityProfile.tsx b/datahub-web-react/src/app/entity/restricted/RestrictedEntityProfile.tsx new file mode 100644 index 
00000000000000..cf8aa5935e42f9 --- /dev/null +++ b/datahub-web-react/src/app/entity/restricted/RestrictedEntityProfile.tsx @@ -0,0 +1,30 @@ +import React from 'react'; +import styled from 'styled-components'; +import { + LogoIcon, + PlatformContentWrapper, + PlatformText, + PreviewImage, +} from '../shared/containers/profile/header/PlatformContent/PlatformContentView'; +import RestrictedIcon from '../../../images/restricted.svg'; +import { EntityTitle } from '../shared/containers/profile/header/EntityName'; + +const SubHeader = styled.div` + margin-top: 8px; + font-size: 14px; +`; + +export function RestrictedEntityProfile() { + return ( + <> + + + + + Restricted + + Restricted Asset + This asset is Restricted. Please request access to see more. + + ); +} diff --git a/datahub-web-react/src/app/entity/schemaField/SchemaFieldPropertiesEntity.tsx b/datahub-web-react/src/app/entity/schemaField/SchemaFieldPropertiesEntity.tsx new file mode 100644 index 00000000000000..7e74b43e68afbb --- /dev/null +++ b/datahub-web-react/src/app/entity/schemaField/SchemaFieldPropertiesEntity.tsx @@ -0,0 +1,48 @@ +import * as React from 'react'; +import { PicCenterOutlined } from '@ant-design/icons'; +import { EntityType, SchemaFieldEntity, SearchResult } from '../../../types.generated'; +import { Entity, IconStyleType, PreviewType } from '../Entity'; +import { getDataForEntityType } from '../shared/containers/profile/utils'; +import { Preview } from './preview/Preview'; + +export class SchemaFieldPropertiesEntity implements Entity { + type: EntityType = EntityType.SchemaField; + + icon = (fontSize: number, styleType: IconStyleType, color = '#BFBFBF') => ( + + ); + + isSearchEnabled = () => true; + + isBrowseEnabled = () => false; + + isLineageEnabled = () => false; + + // Currently unused. + getAutoCompleteFieldName = () => 'schemaField'; + + // Currently unused. + getPathName = () => 'schemaField'; + + // Currently unused. + getEntityName = () => 'schemaField'; + + // Currently unused. 
+ getCollectionName = () => 'schemaFields'; + + // Currently unused. + renderProfile = (_: string) => <>; + + renderPreview = (previewType: PreviewType, data: SchemaFieldEntity) => ( + + ); + + renderSearch = (result: SearchResult) => this.renderPreview(PreviewType.SEARCH, result.entity as SchemaFieldEntity); + + displayName = (data: SchemaFieldEntity) => data?.fieldPath || data.urn; + + getGenericEntityProperties = (data: SchemaFieldEntity) => + getDataForEntityType({ data, entityType: this.type, getOverrideProperties: (newData) => newData }); + + supportedCapabilities = () => new Set([]); +} diff --git a/datahub-web-react/src/app/entity/schemaField/preview/Preview.tsx b/datahub-web-react/src/app/entity/schemaField/preview/Preview.tsx new file mode 100644 index 00000000000000..b22e988c76672c --- /dev/null +++ b/datahub-web-react/src/app/entity/schemaField/preview/Preview.tsx @@ -0,0 +1,40 @@ +import React from 'react'; +import { PicCenterOutlined } from '@ant-design/icons'; +import { EntityType, Owner } from '../../../../types.generated'; +import DefaultPreviewCard from '../../../preview/DefaultPreviewCard'; +import { useEntityRegistry } from '../../../useEntityRegistry'; +import { IconStyleType, PreviewType } from '../../Entity'; + +export const Preview = ({ + datasetUrn, + name, + description, + owners, + previewType, +}: { + datasetUrn: string; + name: string; + description?: string | null; + owners?: Array | null; + previewType: PreviewType; +}): JSX.Element => { + const entityRegistry = useEntityRegistry(); + + const url = `${entityRegistry.getEntityUrl(EntityType.Dataset, datasetUrn)}/${encodeURIComponent( + 'Schema', + )}?schemaFilter=${encodeURIComponent(name)}`; + + return ( + } + type="Column" + typeIcon={entityRegistry.getIcon(EntityType.SchemaField, 14, IconStyleType.ACCENT)} + /> + ); +}; diff --git a/datahub-web-react/src/app/entity/shared/EntityContext.ts b/datahub-web-react/src/app/entity/shared/EntityContext.ts new file mode 100644 index 
00000000000000..abc7fcfa6cced0 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/EntityContext.ts @@ -0,0 +1,67 @@ +import React, { useContext } from 'react'; +import { EntityType } from '../../../types.generated'; +import { useIsSeparateSiblingsMode } from './siblingUtils'; +import { EntityContextType, UpdateEntityType } from './types'; + +const EntityContext = React.createContext({ + urn: '', + entityType: EntityType.Dataset, + entityData: null, + loading: true, + baseEntity: null, + updateEntity: () => Promise.resolve({}), + routeToTab: () => {}, + refetch: () => Promise.resolve({}), + lineage: undefined, + dataNotCombinedWithSiblings: null, +}); + +export default EntityContext; + +export function useEntityContext() { + return useContext(EntityContext); +} + +export const useBaseEntity = (): T => { + const { baseEntity } = useContext(EntityContext); + return baseEntity as T; +}; + +export const useDataNotCombinedWithSiblings = (): T => { + const { dataNotCombinedWithSiblings } = useContext(EntityContext); + return dataNotCombinedWithSiblings as T; +}; + +export const useEntityUpdate = (): UpdateEntityType | null | undefined => { + const { updateEntity } = useContext(EntityContext); + return updateEntity; +}; + +export const useEntityData = () => { + const { urn, entityType, entityData, loading } = useContext(EntityContext); + return { urn, entityType, entityData, loading }; +}; + +export const useRouteToTab = () => { + const { routeToTab } = useContext(EntityContext); + return routeToTab; +}; + +export const useRefetch = () => { + const { refetch } = useContext(EntityContext); + return refetch; +}; + +export const useLineageData = () => { + const { lineage } = useContext(EntityContext); + return lineage; +}; + +export const useMutationUrn = () => { + const { urn, entityData } = useContext(EntityContext); + const isHideSiblingMode = useIsSeparateSiblingsMode(); + if (!entityData?.siblings || entityData?.siblings?.isPrimary || isHideSiblingMode) { + 
return urn; + } + return entityData?.siblings?.siblings?.[0]?.urn || urn; +}; diff --git a/datahub-web-react/src/app/entity/shared/EntityContext.tsx b/datahub-web-react/src/app/entity/shared/EntityContext.tsx deleted file mode 100644 index c564d73c7441fd..00000000000000 --- a/datahub-web-react/src/app/entity/shared/EntityContext.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import React, { useContext } from 'react'; -import { EntityType } from '../../../types.generated'; -import { useIsSeparateSiblingsMode } from './siblingUtils'; -import { EntityContextType, UpdateEntityType } from './types'; - -const EntityContext = React.createContext({ - urn: '', - entityType: EntityType.Dataset, - entityData: null, - loading: true, - baseEntity: null, - updateEntity: () => Promise.resolve({}), - routeToTab: () => {}, - refetch: () => Promise.resolve({}), - lineage: undefined, - dataNotCombinedWithSiblings: null, -}); - -export default EntityContext; - -export function useEntityContext() { - return useContext(EntityContext); -} - -export const useBaseEntity = (): T => { - const { baseEntity } = useContext(EntityContext); - return baseEntity as T; -}; - -export const useDataNotCombinedWithSiblings = (): T => { - const { dataNotCombinedWithSiblings } = useContext(EntityContext); - return dataNotCombinedWithSiblings as T; -}; - -export const useEntityUpdate = (): UpdateEntityType | null | undefined => { - const { updateEntity } = useContext(EntityContext); - return updateEntity; -}; - -export const useEntityData = () => { - const { urn, entityType, entityData, loading } = useContext(EntityContext); - return { urn, entityType, entityData, loading }; -}; - -export const useRouteToTab = () => { - const { routeToTab } = useContext(EntityContext); - return routeToTab; -}; - -export const useRefetch = () => { - const { refetch } = useContext(EntityContext); - return refetch; -}; - -export const useLineageData = () => { - const { lineage } = useContext(EntityContext); - return lineage; -}; - 
-export const useMutationUrn = () => { - const { urn, entityData } = useContext(EntityContext); - const isHideSiblingMode = useIsSeparateSiblingsMode(); - if (!entityData?.siblings || entityData?.siblings?.isPrimary || isHideSiblingMode) { - return urn; - } - return entityData?.siblings?.siblings?.[0]?.urn || urn; -}; diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx index 9788d36af2c65a..d60e86b0af8ca4 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx @@ -1,8 +1,9 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components/macro'; import { EditOutlined } from '@ant-design/icons'; import { message, Button, Input, Modal, Typography, Form, Collapse } from 'antd'; import DOMPurify from 'dompurify'; +import { useHistory } from 'react-router'; import { useCreateGlossaryTermMutation, useCreateGlossaryNodeMutation, @@ -16,6 +17,7 @@ import DescriptionModal from '../components/legacy/DescriptionModal'; import { validateCustomUrnId } from '../../../shared/textUtil'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getGlossaryRootToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; +import { getEntityPath } from '../containers/profile/utils'; const StyledItem = styled(Form.Item)` margin-bottom: 0; @@ -33,6 +35,7 @@ interface Props { entityType: EntityType; onClose: () => void; refetchData?: () => void; + isCloning?: boolean; } function CreateGlossaryEntityModal(props: Props) { @@ -43,15 +46,31 @@ function CreateGlossaryEntityModal(props: Props) { const entityRegistry = useEntityRegistry(); const [stagedId, setStagedId] = useState(undefined); const [stagedName, setStagedName] = useState(''); - 
const [selectedParentUrn, setSelectedParentUrn] = useState(entityData.urn); + const [selectedParentUrn, setSelectedParentUrn] = useState(props.isCloning ? '' : entityData.urn); const [documentation, setDocumentation] = useState(''); const [isDocumentationModalVisible, setIsDocumentationModalVisible] = useState(false); const [createButtonDisabled, setCreateButtonDisabled] = useState(true); const refetch = useRefetch(); + const history = useHistory(); const [createGlossaryTermMutation] = useCreateGlossaryTermMutation(); const [createGlossaryNodeMutation] = useCreateGlossaryNodeMutation(); + useEffect(() => { + if (props.isCloning && entityData.entityData) { + const { properties } = entityData.entityData; + + if (properties?.name) { + setStagedName(properties.name); + form.setFieldValue('name', properties.name); + } + + if (properties?.description) { + setDocumentation(properties.description); + } + } + }, [props.isCloning, entityData.entityData, form]); + function createGlossaryEntity() { const mutation = entityType === EntityType.GlossaryTerm ? createGlossaryTermMutation : createGlossaryNodeMutation; @@ -67,7 +86,7 @@ function CreateGlossaryEntityModal(props: Props) { }, }, }) - .then(() => { + .then((res) => { message.loading({ content: 'Updating...', duration: 2 }); setTimeout(() => { analytics.event({ @@ -82,12 +101,19 @@ function CreateGlossaryEntityModal(props: Props) { refetch(); if (isInGlossaryContext) { // either refresh this current glossary node or the root nodes or root terms - const nodeToUpdate = entityData?.urn || getGlossaryRootToUpdate(entityType); + const nodeToUpdate = selectedParentUrn || getGlossaryRootToUpdate(entityType); updateGlossarySidebar([nodeToUpdate], urnsToUpdate, setUrnsToUpdate); } if (refetchData) { refetchData(); } + if (props.isCloning) { + const redirectUrn = + entityType === EntityType.GlossaryTerm + ? 
res.data?.createGlossaryTerm + : res.data?.createGlossaryNode; + history.push(getEntityPath(entityType, redirectUrn, entityRegistry, false, false)); + } }, 2000); }) .catch((e) => { diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 5d4f9d9f875cfe..2856a219c435d6 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -9,8 +9,10 @@ import { LinkOutlined, MoreOutlined, PlusOutlined, + CopyOutlined, + WarningOutlined, } from '@ant-design/icons'; -import { Redirect } from 'react-router'; +import { Redirect, useHistory } from 'react-router'; import { EntityType } from '../../../../types.generated'; import CreateGlossaryEntityModal from './CreateGlossaryEntityModal'; import { UpdateDeprecationModal } from './UpdateDeprecationModal'; @@ -24,6 +26,9 @@ import { shouldDisplayChildDeletionWarning, isDeleteDisabled, isMoveDisabled } f import { useUserContext } from '../../../context/useUserContext'; import MoveDomainModal from './MoveDomainModal'; import { useIsNestedDomainsEnabled } from '../../../useAppConfig'; +import { getEntityPath } from '../containers/profile/utils'; +import { useIsSeparateSiblingsMode } from '../siblingUtils'; +import { AddIncidentModal } from '../tabs/Incident/components/AddIncidentModal'; export enum EntityMenuItems { COPY_URL, @@ -32,6 +37,8 @@ export enum EntityMenuItems { ADD_TERM_GROUP, DELETE, MOVE, + CLONE, + RAISE_INCIDENT, } export const MenuIcon = styled(MoreOutlined)<{ fontSize?: number }>` @@ -79,6 +86,8 @@ interface Props { } function EntityDropdown(props: Props) { + const history = useHistory(); + const { urn, entityData, @@ -95,6 +104,7 @@ function EntityDropdown(props: Props) { const me = useUserContext(); const entityRegistry = useEntityRegistry(); const [updateDeprecation] = 
useUpdateDeprecationMutation(); + const isHideSiblingMode = useIsSeparateSiblingsMode(); const isNestedDomainsEnabled = useIsNestedDomainsEnabled(); const { onDeleteEntity, hasBeenDeleted } = useDeleteEntity( urn, @@ -107,8 +117,10 @@ function EntityDropdown(props: Props) { const [isCreateTermModalVisible, setIsCreateTermModalVisible] = useState(false); const [isCreateNodeModalVisible, setIsCreateNodeModalVisible] = useState(false); + const [isCloneEntityModalVisible, setIsCloneEntityModalVisible] = useState(false); const [isDeprecationModalVisible, setIsDeprecationModalVisible] = useState(false); const [isMoveModalVisible, setIsMoveModalVisible] = useState(false); + const [isRaiseIncidentModalVisible, setIsRaiseIncidentModalVisible] = useState(false); const handleUpdateDeprecation = async (deprecatedStatus: boolean) => { message.loading({ content: 'Updating...' }); @@ -177,6 +189,7 @@ function EntityDropdown(props: Props) { )} {menuItems.has(EntityMenuItems.ADD_TERM) && ( setIsCreateTermModalVisible(true)} @@ -230,6 +243,24 @@ function EntityDropdown(props: Props) { )} + {menuItems.has(EntityMenuItems.CLONE) && ( + setIsCloneEntityModalVisible(true)} + > + +  Clone + + + )} + {menuItems.has(EntityMenuItems.RAISE_INCIDENT) && ( + + setIsRaiseIncidentModalVisible(true)}> +  Raise Incident + + + )} } trigger={['click']} @@ -250,6 +281,14 @@ function EntityDropdown(props: Props) { refetchData={refetchForNodes} /> )} + {isCloneEntityModalVisible && ( + setIsCloneEntityModalVisible(false)} + refetchData={entityType === EntityType.GlossaryTerm ? 
refetchForTerms : refetchForNodes} + isCloning + /> + )} {isDeprecationModalVisible && ( setIsMoveModalVisible(false)} />} {hasBeenDeleted && !onDelete && deleteRedirectPath && } + {isRaiseIncidentModalVisible && ( + setIsRaiseIncidentModalVisible(false)} + refetch={ + (() => { + refetchForEntity?.(); + history.push( + `${getEntityPath( + entityType, + urn, + entityRegistry, + false, + isHideSiblingMode, + 'Incidents', + )}`, + ); + }) as any + } + /> + )} ); } diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/MoveGlossaryEntityModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/MoveGlossaryEntityModal.tsx index 37a625f58100b3..51b39be4e20ea2 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/MoveGlossaryEntityModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/MoveGlossaryEntityModal.tsx @@ -7,6 +7,7 @@ import { useUpdateParentNodeMutation } from '../../../../graphql/glossary.genera import NodeParentSelect from './NodeParentSelect'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getGlossaryRootToUpdate, getParentNodeToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; +import { getModalDomContainer } from '../../../../utils/focus'; const StyledItem = styled(Form.Item)` margin-bottom: 0; @@ -78,6 +79,7 @@ function MoveGlossaryEntityModal(props: Props) { } + getContainer={getModalDomContainer} >
diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx index c3bfac35c2ca6d..7227354a465695 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/NodeParentSelect.tsx @@ -1,5 +1,6 @@ import React from 'react'; import { Select } from 'antd'; +import styled from 'styled-components'; import { EntityType, GlossaryNode, SearchResult } from '../../../../types.generated'; import { useEntityRegistry } from '../../../useEntityRegistry'; import { useEntityData } from '../EntityContext'; @@ -7,6 +8,14 @@ import ClickOutside from '../../../shared/ClickOutside'; import GlossaryBrowser from '../../../glossary/GlossaryBrowser/GlossaryBrowser'; import { BrowserWrapper } from '../../../shared/tags/AddTagsTermsModal'; import useParentSelector from './useParentSelector'; +import ParentEntities from '../../../search/filters/ParentEntities'; +import { getParentGlossary } from '../../../glossary/utils'; + +const SearchResultContainer = styled.div` + display: flex; + flex-direction: column; + justify-content: center; +`; // filter out entity itself and its children export function filterResultsForMove(entity: GlossaryNode, entityUrn: string) { @@ -21,6 +30,7 @@ interface Props { selectedParentUrn: string; setSelectedParentUrn: (parent: string) => void; isMoving?: boolean; + autofocus?: boolean; } function NodeParentSelect(props: Props) { @@ -45,10 +55,9 @@ function NodeParentSelect(props: Props) { setSelectedParentUrn, }); - let nodeSearchResults: SearchResult[] = []; - if (isMoving) { - nodeSearchResults = searchResults.filter((r) => filterResultsForMove(r.entity as GlossaryNode, entityDataUrn)); - } + const nodeSearchResults: SearchResult[] = searchResults.filter((r) => + filterResultsForMove(r.entity as GlossaryNode, entityDataUrn), + ); const isShowingGlossaryBrowser = 
!searchQuery && isFocusedOnInput; const shouldHideSelf = isMoving && entityType === EntityType.GlossaryNode; @@ -65,10 +74,14 @@ function NodeParentSelect(props: Props) { onClear={clearSelectedParent} onFocus={() => setIsFocusedOnInput(true)} dropdownStyle={isShowingGlossaryBrowser || !searchQuery ? { display: 'none' } : {}} + autoFocus={props.autofocus} > {nodeSearchResults?.map((result) => ( - {entityRegistry.getDisplayName(result.entity.type, result.entity)} + + + {entityRegistry.getDisplayName(result.entity.type, result.entity)} + ))} diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx index 512735e60b2c3f..01287c2b367bf5 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/UpdateDeprecationModal.tsx @@ -1,7 +1,10 @@ import React from 'react'; -import { Button, DatePicker, Form, Input, message, Modal } from 'antd'; +import { Button, DatePicker, Form, message, Modal } from 'antd'; +import styled from 'styled-components'; import { useBatchUpdateDeprecationMutation } from '../../../../graphql/mutations.generated'; import { handleBatchError } from '../utils'; +import { Editor } from '../tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from '../constants'; type Props = { urns: string[]; @@ -9,6 +12,10 @@ type Props = { refetch?: () => void; }; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4.5]}; +`; + export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { const [batchUpdateDeprecation] = useBatchUpdateDeprecationMutation(); const [form] = Form.useForm(); @@ -27,7 +34,7 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { resources: [...urns.map((urn) => ({ resourceUrn: urn }))], deprecated: true, note: formData.note, - decommissionTime: 
formData.decommissionTime && formData.decommissionTime.unix(), + decommissionTime: formData.decommissionTime && formData.decommissionTime.unix() * 1000, }, }, }); @@ -64,10 +71,11 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { } + width="40%" > - - + + diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx index 1e4737135ed748..aa00e8ebc879d3 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx @@ -7,6 +7,7 @@ import analytics, { EventType } from '../../../analytics'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getParentNodeToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; import { useHandleDeleteDomain } from './useHandleDeleteDomain'; +import { removeTermFromGlossaryNode } from '../../../glossary/cacheUtils'; /** * Performs the flow for deleting an entity of a given type. @@ -28,15 +29,10 @@ function useDeleteEntity( const { isInGlossaryContext, urnsToUpdate, setUrnsToUpdate } = useGlossaryEntityData(); const { handleDeleteDomain } = useHandleDeleteDomain({ entityData, urn }); - const maybeDeleteEntity = getDeleteEntityMutation(type)(); - const deleteEntity = (maybeDeleteEntity && maybeDeleteEntity[0]) || undefined; + const [deleteEntity, { client }] = getDeleteEntityMutation(type)() ?? 
[undefined, { client: undefined }]; function handleDeleteEntity() { - deleteEntity?.({ - variables: { - urn, - }, - }) + deleteEntity?.({ variables: { urn } }) .then(() => { analytics.event({ type: EventType.DeleteEntityEvent, @@ -61,6 +57,9 @@ function useDeleteEntity( if (isInGlossaryContext) { const parentNodeToUpdate = getParentNodeToUpdate(entityData, type); updateGlossarySidebar([parentNodeToUpdate], urnsToUpdate, setUrnsToUpdate); + if (client) { + removeTermFromGlossaryNode(client, parentNodeToUpdate, urn); + } } if (!hideMessage) { message.success({ diff --git a/datahub-web-react/src/app/entity/shared/ExternalUrlButton.tsx b/datahub-web-react/src/app/entity/shared/ExternalUrlButton.tsx index dce74c02cdb345..d821cbfc01355e 100644 --- a/datahub-web-react/src/app/entity/shared/ExternalUrlButton.tsx +++ b/datahub-web-react/src/app/entity/shared/ExternalUrlButton.tsx @@ -5,6 +5,8 @@ import UrlButton from './UrlButton'; const GITHUB_LINK = 'github.com'; const GITHUB = 'GitHub'; +const GITLAB_LINK = 'gitlab.com'; +const GITLAB = 'GitLab'; interface Props { externalUrl: string; @@ -26,6 +28,8 @@ export default function ExternalUrlButton({ externalUrl, platformName, entityTyp let displayedName = platformName; if (externalUrl.toLocaleLowerCase().includes(GITHUB_LINK)) { displayedName = GITHUB; + } else if (externalUrl.toLocaleLowerCase().includes(GITLAB_LINK)) { + displayedName = GITLAB; } return ( diff --git a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx index f00f16647c94b4..79ec142fd801d4 100644 --- a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx +++ b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx @@ -10,6 +10,8 @@ export interface GlossaryEntityContextType { // This will happen when you edit a name, move a term/group, create a new term/group, and delete a term/group urnsToUpdate: string[]; setUrnsToUpdate: (updatdUrns: string[]) => 
void; + isSidebarOpen: boolean; + setIsSidebarOpen: (isOpen: boolean) => void; } export const GlossaryEntityContext = React.createContext({ @@ -18,10 +20,27 @@ export const GlossaryEntityContext = React.createContext {}, urnsToUpdate: [], setUrnsToUpdate: () => {}, + isSidebarOpen: true, + setIsSidebarOpen: () => {}, }); export const useGlossaryEntityData = () => { - const { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate } = - useContext(GlossaryEntityContext); - return { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate }; + const { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + } = useContext(GlossaryEntityContext); + return { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + }; }; diff --git a/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx b/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx index cb37c44a36caa7..0e899bc391e0a7 100644 --- a/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx +++ b/datahub-web-react/src/app/entity/shared/components/legacy/DescriptionModal.tsx @@ -31,6 +31,17 @@ type Props = { export default function UpdateDescriptionModal({ title, description, original, onClose, onSubmit, isAddDesc }: Props) { const [updatedDesc, setDesc] = useState(description || original || ''); + const handleEditorKeyDown = (event: React.KeyboardEvent) => { + if ( + event.key === 'ArrowDown' || + event.key === 'ArrowUp' || + event.key === 'ArrowRight' || + event.key === 'ArrowLeft' + ) { + event.stopPropagation(); + } + }; + return ( - + {!isAddDesc && description && original && ( Original:}> diff --git a/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx b/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx 
index 669e32520b038a..579c58214aaa1a 100644 --- a/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx +++ b/datahub-web-react/src/app/entity/shared/components/legacy/MarkdownViewer.tsx @@ -76,6 +76,7 @@ export const MarkdownView = styled(MDEditor.Markdown)` export type Props = { source: string; limit?: number; + // eslint-disable-next-line react/no-unused-prop-types isCompact?: boolean; editable?: boolean; onEditClicked?: () => void; diff --git a/datahub-web-react/src/app/entity/shared/components/styled/AddLinkModal.tsx b/datahub-web-react/src/app/entity/shared/components/styled/AddLinkModal.tsx index 68a8cf40943629..9e18de3b294bf7 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/AddLinkModal.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/AddLinkModal.tsx @@ -5,6 +5,7 @@ import { useEntityData, useMutationUrn } from '../../EntityContext'; import { useAddLinkMutation } from '../../../../../graphql/mutations.generated'; import analytics, { EventType, EntityActionType } from '../../../../analytics'; import { useUserContext } from '../../../../context/useUserContext'; +import { getModalDomContainer } from '../../../../../utils/focus'; type AddLinkProps = { buttonProps?: Record; @@ -73,6 +74,7 @@ export const AddLinkModal = ({ buttonProps, refetch }: AddLinkProps) => { Add , ]} + getContainer={getModalDomContainer} > void; showUndeprecate: boolean | null; }; +const ABBREVIATED_LIMIT = 80; export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: Props) => { const [batchUpdateDeprecationMutation] = useBatchUpdateDeprecationMutation(); + const [expanded, setExpanded] = useState(false); + const overLimit = deprecation?.note && removeMarkdown(deprecation?.note).length > 80; /** * Deprecation Decommission Timestamp */ const localeTimezone = getLocaleTimezone(); + + let decommissionTimeSeconds; + if (deprecation.decommissionTime) { + if (deprecation.decommissionTime < 943920000000) { + 
// Time is set in way past if it was milli-second so considering this as set in seconds + decommissionTimeSeconds = deprecation.decommissionTime; + } else { + decommissionTimeSeconds = deprecation.decommissionTime / 1000; + } + } const decommissionTimeLocal = - (deprecation.decommissionTime && + (decommissionTimeSeconds && `Scheduled to be decommissioned on ${moment - .unix(deprecation.decommissionTime) + .unix(decommissionTimeSeconds) .format('DD/MMM/YYYY')} (${localeTimezone})`) || undefined; const decommissionTimeGMT = - deprecation.decommissionTime && - moment.unix(deprecation.decommissionTime).utc().format('dddd, DD/MMM/YYYY HH:mm:ss z'); + decommissionTimeSeconds && moment.unix(decommissionTimeSeconds).utc().format('dddd, DD/MMM/YYYY HH:mm:ss z'); const hasDetails = deprecation.note !== '' || deprecation.decommissionTime !== null; const isDividerNeeded = deprecation.note !== '' && deprecation.decommissionTime !== null; @@ -122,14 +155,55 @@ export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: return ( {deprecation?.note !== '' && Deprecation note} {isDividerNeeded && } - {deprecation?.note !== '' && {deprecation.note}} + + {expanded || !overLimit ? 
( + <> + {deprecation?.note && deprecation?.note !== '' && ( + <> + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + )} + + ) : ( + <> + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {deprecation.note} + + + )} + {deprecation?.decommissionTime !== null && ( diff --git a/datahub-web-react/src/app/entity/shared/components/styled/ERModelRelationship/CreateERModelRelationModal.less b/datahub-web-react/src/app/entity/shared/components/styled/ERModelRelationship/CreateERModelRelationModal.less new file mode 100644 index 00000000000000..363c5080efffcc --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/components/styled/ERModelRelationship/CreateERModelRelationModal.less @@ -0,0 +1,296 @@ +.CreateERModelRelationModal { + .ermodelrelation-name { + padding: 8px 16px; + width: 948.5px !important; + height: 40px !important; + background: #ffffff; + border: 1px solid #d9d9d9; + box-shadow: 0px 2px 0px rgba(0, 0, 0, 0.016); + border-radius: 2px; + align-items: center; + } + .ant-select-single.ant-select-lg:not(.ant-select-customize-input) .ant-select-selector { + align-items: center; + padding: 8px 16px; + gap: 8px; + max-width: 370px; + min-width: 370px; + height: 38px; + background: #ffffff; + border: 1px solid #d9d9d9; + box-shadow: 0px 2px 0px rgba(0, 0, 0, 0.016); + border-radius: 2px; + } + .ant-modal-content { + box-sizing: border-box; + width: 1000px; + height: 765px; + background: #ffffff; + border: 1px solid #adc0d7; + box-shadow: 0px 0px 10px rgba(0, 0, 0, 0.15); + border-radius: 8px; + left: -215px; + top: -55px; + } + .inner-div { + width: 970px; + height: 640px; + overflow-y: scroll; + margin-top: -20px; + overflow-x: hidden; + } + .ant-modal-header { + padding-top: 32px; + padding-left: 16px; + border-bottom: 0px !important; + } + + .ermodelrelation-title { + width: 300px !important; + height: 22px; + font-family: 'Arial'; + font-style: normal; + font-weight: 700; + font-size: 20px; + line-height: 
22px; + color: #000000; + padding-top: 4px; + } + .all-content-heading { + width: 380px; + height: 16px; + margin-top: 16px; + margin-bottom: 8px; + font-family: 'Arial'; + font-style: normal; + font-weight: 700; + font-size: 14px; + line-height: 16px; + color: #1b2f41; + flex: none; + } + .all-table-heading { + width: 380px; + height: 16px; + margin-bottom: 8px; + font-family: 'Arial'; + font-style: normal; + font-weight: 700; + font-size: 14px; + line-height: 16px; + color: #1b2f41; + flex: none; + } + + .field-heading { + height: 16px; + margin-top: 32px; + margin-bottom: 8px; + font-family: 'Arial'; + font-style: normal; + font-weight: 700; + font-size: 14px; + line-height: 16px; + color: #1b2f41; + } + .all-information { + width: 680px; + height: 24px; + font-family: 'Arial'; + font-style: normal; + font-weight: 400; + font-size: 16px; + color: #1b2f41; + } + .techNameDisplay { + font-size: 14px; + font-style: normal; + font-weight: 400; + color: #595959; + } + .instructions-list { + width: 774px; + height: 220px; + font-family: 'Arial'; + font-style: normal; + font-weight: 400; + font-size: 14px; + line-height: 150%; + color: #556573; + flex: none; + } + .ant-modal-footer { + padding-top: 0px; + padding-bottom: 10px; + padding-right: 25px; + border-top: 0px; + } + + .ant-btn-link { + padding-left: 0px !important; + padding-right: 1px !important; + font-family: 'Arial' !important; + font-style: normal !important; + font-weight: 400 !important; + font-size: 14px !important; + color: #1890ff !important; + } + .add-btn-link { + padding-left: 865px !important; + padding-right: 8px !important; + padding-top: 16px !important; + height: 20px; + font-family: 'Arial' !important; + font-style: normal !important; + font-weight: 700 !important; + font-size: 12px !important; + color: #1890ff !important; + line-height: 20px; + } + + .cancel-btn { + box-sizing: border-box; + margin-left: 440px; + width: 85px; + height: 40px !important; + background: #ffffff; + border: 1px 
solid #d9d9d9 !important; + border-radius: 5px; + color: #262626; + } + + .submit-btn, + .submit-btn:hover { + margin-left: 28px; + //margin-top: 6px; + width: 86px; + height: 40px; + background: #1890ff; + border: none; + color: #ffffff; + } + .footer-parent-div { + padding-left: 8px; + display: flex; + } + .ermodelrelation-select-selector { + align-items: center; + width: 300px !important; + height: 38px !important; + border: none; + max-width: 373px !important; + min-width: 373px !important; + font-size: 14px; + line-height: 22px; + font-family: 'Roboto Mono', monospace; + font-weight: 400; + background: white; + font-style: normal; + color: #000000d9; + } + .ermodelrelation-details-ta { + height: 95px; + width: 720px; + font-style: normal; + font-weight: 400; + font-size: 14px; + line-height: 22px; + color: rgba(0, 0, 0, 0.85); + } + .ERModelRelationTable { + .icon-image { + box-sizing: border-box; + width: 16px; + height: 0px; + border: 1px solid #000000; + } + .ant-table-content { + width: 950px; + } + .ant-table-thead > tr th { + font-style: normal; + font-weight: 500; + font-size: 14px; + line-height: 22px; + color: #1b2f41; + align-items: center; + padding: 16px; + gap: 4px; + isolation: isolate; + height: 56px !important; + background: #ffffff; + border-color: rgba(0, 0, 0, 0.12); + } + .ant-table-tbody > tr td { + letter-spacing: 0.3px; + margin-left: 0px; + background: white; + font-style: normal; + font-weight: 400; + font-size: 14px; + line-height: 22px; + color: rgba(0, 0, 0, 0.85); + border-color: rgba(0, 0, 0, 0.12); + } + td:nth-child(1), + td:nth-child(3) { + max-width: 400px !important; + min-width: 400px !important; + } + .titleNameDisplay { + max-width: 360px; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + width: fit-content; + display: inline-block; + font-size: 14px; + padding: 4px 0; + } + .firstRow { + display: flex; + justify-content: left; + } + + .editableNameDisplay { + display: block; + overflow-wrap: 
break-word; + white-space: nowrap; + max-width: 360px; + overflow: hidden; + text-overflow: ellipsis; + height: 16px; + font-family: 'Arial'; + font-style: normal; + font-weight: 400; + font-size: 14px; + line-height: 16px; + color: #595959; + } + td:nth-child(2), + th:nth-child(2) { + min-width: 44px !important; + max-width: 44px !important; + } + td:nth-child(4), + th:nth-child(4) { + min-width: 75px !important; + max-width: 75px !important; + } + table { + border-radius: 0.375rem; + border-collapse: collapse; + } + .SelectedRow { + background-color: #ecf2f8; + } + } +} +.cancel-modal { + .ant-btn-primary { + color: #ffffff; + background: #1890ff; + border: none; + box-shadow: none; + } +} diff --git a/datahub-web-react/src/app/entity/shared/components/styled/ERModelRelationship/CreateERModelRelationModal.tsx b/datahub-web-react/src/app/entity/shared/components/styled/ERModelRelationship/CreateERModelRelationModal.tsx new file mode 100644 index 00000000000000..dd6cbc36987054 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/components/styled/ERModelRelationship/CreateERModelRelationModal.tsx @@ -0,0 +1,431 @@ +import React, { useState } from 'react'; +import { Button, Form, Input, message, Modal, Table } from 'antd'; +import TextArea from 'antd/lib/input/TextArea'; +import { PlusOutlined } from '@ant-design/icons'; +import arrow from '../../../../../../images/Arrow.svg'; +import './CreateERModelRelationModal.less'; +import { EntityType, ErModelRelationship, OwnerEntityType } from '../../../../../../types.generated'; +import { + useCreateErModelRelationshipMutation, + useUpdateErModelRelationshipMutation, +} from '../../../../../../graphql/ermodelrelationship.generated'; +import { useUserContext } from '../../../../../context/useUserContext'; +import { EditableRow } from './EditableRow'; +import { EditableCell } from './EditableCell'; +import { + checkDuplicateERModelRelation, + getDatasetName, + ERModelRelationDataType, + validateERModelRelation, +} 
from './ERModelRelationUtils'; +import { useGetSearchResultsQuery } from '../../../../../../graphql/search.generated'; +import { useAddOwnerMutation } from '../../../../../../graphql/mutations.generated'; + +type Props = { + table1?: any; + table1Schema?: any; + table2?: any; + table2Schema?: any; + visible: boolean; + setModalVisible?: any; + onCancel: () => void; + editERModelRelation?: ErModelRelationship; + isEditing?: boolean; + refetch: () => Promise; +}; + +type EditableTableProps = Parameters[0]; +type ColumnTypes = Exclude; + +export const CreateERModelRelationModal = ({ + table1, + table1Schema, + table2, + table2Schema, + visible, + setModalVisible, + onCancel, + editERModelRelation, + isEditing, + refetch, +}: Props) => { + const [form] = Form.useForm(); + const { user } = useUserContext(); + const ownerEntityType = + user && user.type === EntityType.CorpGroup ? OwnerEntityType.CorpGroup : OwnerEntityType.CorpUser; + const table1Dataset = editERModelRelation?.properties?.source || table1?.dataset; + const table1DatasetSchema = editERModelRelation?.properties?.source || table1Schema; + const table2Dataset = editERModelRelation?.properties?.destination || table2?.dataset; + const table2DatasetSchema = editERModelRelation?.properties?.destination || table2Schema?.dataset; + + const [details, setDetails] = useState(editERModelRelation?.editableProperties?.description || ''); + const [ermodelrelationName, setERModelRelationName] = useState( + editERModelRelation?.editableProperties?.name || + editERModelRelation?.properties?.name || + editERModelRelation?.id || + '', + ); + const [tableData, setTableData] = useState( + editERModelRelation?.properties?.relationshipFieldMappings?.map((item, index) => { + return { + key: index, + field1Name: item.sourceField, + field2Name: item.destinationField, + }; + }) || [ + { key: '0', field1Name: '', field2Name: '' }, + { key: '1', field1Name: '', field2Name: '' }, + ], + ); + const [count, setCount] = 
useState(editERModelRelation?.properties?.relationshipFieldMappings?.length || 2); + const [createMutation] = useCreateErModelRelationshipMutation(); + const [updateMutation] = useUpdateErModelRelationshipMutation(); + const [addOwnerMutation] = useAddOwnerMutation(); + const { refetch: getSearchResultsERModelRelations } = useGetSearchResultsQuery({ + skip: true, + }); + + const handleDelete = (record) => { + const newData = tableData.filter((item) => item.key !== record.key); + setTableData(newData); + }; + const onCancelSelect = () => { + Modal.confirm({ + title: `Exit`, + className: 'cancel-modal', + content: `Are you sure you want to exit? The changes made to the erModelRelationship will not be applied.`, + onOk() { + setERModelRelationName(editERModelRelation?.properties?.name || ''); + setDetails(editERModelRelation?.editableProperties?.description || ''); + setTableData( + editERModelRelation?.properties?.relationshipFieldMappings?.map((item, index) => { + return { + key: index, + field1Name: item.sourceField, + field2Name: item.destinationField, + }; + }) || [ + { key: '0', field1Name: '', field2Name: '' }, + { key: '1', field1Name: '', field2Name: '' }, + ], + ); + setCount(editERModelRelation?.properties?.relationshipFieldMappings?.length || 2); + onCancel?.(); + }, + onCancel() {}, + okText: 'Yes', + maskClosable: true, + closable: true, + }); + }; + const createERModelRelationship = () => { + createMutation({ + variables: { + input: { + properties: { + source: table1Dataset?.urn || '', + destination: table2Dataset?.urn || '', + name: ermodelrelationName, + relationshipFieldmappings: tableData.map((r) => { + return { + sourceField: r.field1Name, + destinationField: r.field2Name, + }; + }), + created: true, + }, + editableProperties: { + name: ermodelrelationName, + description: details, + }, + }, + }, + }) + .then(({ data }) => { + message.loading({ + content: 'Create...', + duration: 2, + }); + setTimeout(() => { + refetch(); + message.success({ + 
content: `ERModelRelation created!`, + duration: 2, + }); + }, 2000); + addOwnerMutation({ + variables: { + input: { + ownerUrn: user?.urn || '', + resourceUrn: data?.createERModelRelationship?.urn || '', + ownershipTypeUrn: 'urn:li:ownershipType:__system__technical_owner', + ownerEntityType: ownerEntityType || EntityType, + }, + }, + }); + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to create erModelRelationship: ${e.message || ''}`, duration: 3 }); + }); + }; + const originalERModelRelationName = editERModelRelation?.properties?.name; + const updateERModelRelationship = () => { + updateMutation({ + variables: { + urn: editERModelRelation?.urn || '', + input: { + properties: { + source: table1Dataset?.urn || '', + destination: table2Dataset?.urn || '', + name: originalERModelRelationName || '', + createdBy: editERModelRelation?.properties?.createdActor?.urn || user?.urn, + createdAt: editERModelRelation?.properties?.createdTime || 0, + relationshipFieldmappings: tableData.map((r) => { + return { + sourceField: r.field1Name, + destinationField: r.field2Name, + }; + }), + }, + editableProperties: { + name: ermodelrelationName, + description: details, + }, + }, + }, + }) + .then(() => { + message.loading({ + content: 'updating...', + duration: 2, + }); + setTimeout(() => { + refetch(); + message.success({ + content: `ERModelRelation updated!`, + duration: 2, + }); + }, 2000); + }) + .catch((e) => { + message.destroy(); + message.error({ content: `Failed to update erModelRelationship: ${e.message || ''}`, duration: 3 }); + }); + }; + const onSubmit = async () => { + const errors = validateERModelRelation( + ermodelrelationName, + tableData, + isEditing, + getSearchResultsERModelRelations, + ); + if ((await errors).length > 0) { + const err = (await errors).join(`, `); + message.error(err); + return; + } + if (isEditing) { + updateERModelRelationship(); + } else { + createERModelRelationship(); + setERModelRelationName(''); + 
setDetails(''); + setTableData([ + { key: '0', field1Name: '', field2Name: '' }, + { key: '1', field1Name: '', field2Name: '' }, + ]); + setCount(2); + } + setModalVisible(false); + }; + + const table1NameBusiness = getDatasetName(table1Dataset); + const table1NameTech = table1Dataset?.name || table1Dataset?.urn.split(',').at(1) || ''; + const table2NameBusiness = getDatasetName(table2Dataset); + const table2NameTech = table2Dataset?.name || table2Dataset?.urn.split(',').at(1) || ''; + + const handleAdd = () => { + const newData: ERModelRelationDataType = { + key: count, + field1Name: '', + field2Name: '', + }; + setTableData([...tableData, newData]); + setCount(count + 1); + }; + const defaultColumns: (ColumnTypes[number] & { editable?: boolean; dataIndex: string; tableRecord?: any })[] = [ + { + title: ( +

+

+ {table1NameBusiness || table1NameTech} +
+
{table1NameTech !== table1NameBusiness && table1NameTech}
+

+ ), + dataIndex: 'field1Name', + tableRecord: table1DatasetSchema || {}, + editable: true, + }, + { + title: '', + dataIndex: '', + editable: false, + render: () => , + }, + { + title: ( +

+

+ {table2NameBusiness || table2NameTech} +
+
{table2NameTech !== table2NameBusiness && table2NameTech}
+

+ ), + dataIndex: 'field2Name', + tableRecord: table2DatasetSchema || {}, + editable: true, + }, + { + title: 'Action', + dataIndex: '', + editable: false, + render: (record) => + tableData.length > 1 ? ( + + ) : null, + }, + ]; + const handleSave = (row: ERModelRelationDataType) => { + const newData = [...tableData]; + const index = newData.findIndex((item) => row.key === item.key); + const item = newData[index]; + newData.splice(index, 1, { + ...item, + ...row, + }); + setTableData(newData); + }; + const components = { + body: { + row: EditableRow, + cell: EditableCell, + }, + }; + + const columns = defaultColumns.map((col) => { + if (!col.editable) { + return col; + } + return { + ...col, + onCell: (record: ERModelRelationDataType) => ({ + record, + editable: col.editable, + dataIndex: col.dataIndex, + tableRecord: col.tableRecord, + title: col.title, + handleSave, + }), + }; + }); + return ( + +

ER-Model-Relationship Parameters

+
+ +
+
+ +
+ + } + visible={visible} + closable={false} + className="CreateERModelRelationModal" + okButtonProps={{ hidden: true }} + cancelButtonProps={{ hidden: true }} + onCancel={onCancelSelect} + destroyOnClose + > +
+

Table 1

+

{table1NameBusiness}

+
{table1NameTech !== table1NameBusiness && table1NameTech}
+

Table 2

+

{table2NameBusiness}

+
{table2NameTech !== table2NameBusiness && table2NameTech}
+

ER-Model-Relationship name

+ + + checkDuplicateERModelRelation(getSearchResultsERModelRelations, value?.trim()).then( + (result) => { + return result === true && !isEditing + ? Promise.reject( + new Error( + 'This ER-Model-Relationship name already exists. A unique name for each ER-Model-Relationship is required.', + ), + ) + : Promise.resolve(); + }, + ), + }, + ]} + > + setERModelRelationName(e.target.value)} + /> + +

Fields

+
+ +

ER-Model-Relationship details

+ +